code
stringlengths 52
7.75k
| docs
stringlengths 1
5.85k
|
---|---|
def find_changelogs(session, name, candidates):
    """
    Tries to find changelogs on the given URL candidates

    :param session: requests Session instance
    :param name: str, project name
    :param candidates: list, URL candidates
    :return: tuple, (set(changelog URLs), set(repo URLs))
    """
    repos = filter_repo_urls(candidates=candidates)
    # no valid repo URL among the candidates: crawl the candidate pages
    # themselves for links to a repo
    if not repos:
        logger.info("No repo found, trying to find one on related sites {}".format(candidates))
        repos = set(find_repo_urls(session, name, candidates))
    urls = []
    for repo in repos:
        for url in find_changelog(session, repo):
            # guard against picking up a changelog for an unrelated project
            if not contains_project_name(name, url):
                logger.debug("Found changelog on {url}, but it does not contain the project name "
                             "{name}, ""aborting".format(name=name, url=url))
                continue
            urls.append(url)
    if not urls:
        # plain changelog files were a bust; fall back to the repo's
        # release page (e.g. GitHub releases)
        logger.debug("No plain changelog urls found, trying release page")
        for repo in repos:
            # make sure the link to the release page contains the project name
            if contains_project_name(name, repo):
                urls.extend(find_release_page(session, repo))
    return set(urls), repos
def find_git_repo(session, name, candidates):
    """
    Tries to find git repos on the given URL candidates

    :param session: requests Session instance
    :param name: str, project name
    :param candidates: list, URL candidates
    :return: tuple, (set(git URLs), set(repo URLs))
    """
    repos = filter_repo_urls(candidates=candidates)
    # if we are lucky and there isn't a valid repo URL in our URL candidates, we need to go deeper
    # and check the URLs if they contain a link to a repo
    if not repos:
        logger.info("No repo found, trying to find one on related sites {}".format(candidates))
        repos = set(find_repo_urls(session, name, candidates))
    urls = []
    for repo in repos:
        # a canonical repo URL looks like https://host/<user>/<repo>[/...];
        # guard against URLs with fewer path segments instead of crashing
        # with a ValueError on the tuple unpack
        parts = repo.split("/")[3:5]
        if len(parts) < 2:
            continue
        username, reponame = parts
        if "github.com" in repo:
            urls.append(
                "https://github.com/{username}/{reponame}.git".format(
                    username=username, reponame=reponame
                )
            )
        elif "bitbucket.org" in repo:
            urls.append(
                "https://bitbucket.org/{username}/{reponame}".format(
                    username=username, reponame=reponame
                )
            )
    return set(urls), repos
def get_urls(session, name, data, find_changelogs_fn, **kwargs):
    """Gets URLs to changelogs.

    :param session: requests Session instance
    :param name: str, package name
    :param data: dict, meta data
    :param find_changelogs_fn: function, find_changelogs
    :return: tuple, (set(changelog URLs), set(repo URLs))
    """
    # if this package has valid meta data, build up a list of URL candidates
    # we can possibly search for changelogs on
    meta_attrs = (
        "project_uri", "homepage_uri", "wiki_uri", "documentation_uri", "mailing_list_uri",
        "source_code_uri", "bug_tracker_uri"
    )
    raw_candidates = (data.get(attr) for attr in meta_attrs)
    candidates = [url for url in raw_candidates if url]
    return find_changelogs_fn(session=session, name=name, candidates=candidates)
def complete(text, state):
    """Auto complete scss constructions in interactive mode.

    Readline-style completer: returns the *state*-th command starting
    with *text*, or ``None`` when no further match exists.
    """
    remaining = state
    for cmd in COMMANDS:
        if not cmd.startswith(text):
            continue
        if not remaining:
            return cmd
        remaining -= 1
def readcfg(filepath, section):
    """Reads the configuration file. If section is not available, calls
    create_oedb_config_file to add the new section to an existing config.ini.

    Parameters
    ----------
    filepath : str
        Absolute path of config file including the filename itself
    section : str
        Section in config file which contains connection details

    Returns
    -------
    cfg : configparser.ConfigParser
        Used for configuration file parser language.
    """
    cfg = cp.ConfigParser()
    cfg.read(filepath)
    if cfg.has_section(section):
        return cfg
    # section missing: tell the user and (re)create it interactively
    print('The section "{sec}" is not in the config file {file}.'
          .format(sec=section,
                  file=filepath))
    return create_oedb_config_file(filepath, section)
def get_connection_details(section):
    """Asks the user for the database connection details and returns them as a
    ConfigParser-object.

    Parameters
    ----------
    section : str
        Name of the config section the details are stored under

    Returns
    -------
    cfg : configparser.ConfigParser
        Used for configuration file parser language.
    """
    print('Please enter your connection details:')
    dialect = input('Enter input value for `dialect` (default: psycopg2): ') or 'psycopg2'
    username = input('Enter value for `username`: ')
    database = input('Enter value for `database`: ')
    host = input('Enter value for `host`: ')
    port = input('Enter value for `port` (default: 5432): ') or '5432'
    cfg = cp.ConfigParser()
    cfg.add_section(section)
    cfg.set(section, 'dialect', dialect)
    cfg.set(section, 'username', username)
    cfg.set(section, 'host', host)
    cfg.set(section, 'port', port)
    cfg.set(section, 'database', database)
    # the original called .format(database=section) on a prompt string that
    # has no placeholders — a no-op; drop the dead format call
    pw = getpass.getpass(prompt="Enter your password/token to "
                                "store it in "
                                "keyring: ")
    keyring.set_password(section, cfg.get(section, "username"), pw)
    return cfg
def connection(filepath=None, section='oep'):
    """Instantiate a database connection (for the use with SQLAlchemy).

    The keyword argument `filepath` specifies the location of the config file
    that contains database connection information. If not given, the default
    of `~/.egoio/config.ini` applies.

    Parameters
    ----------
    filepath : str
        Absolute path of config file including the filename itself
    section : str
        Config section holding the connection details

    Returns
    -------
    conn : sqlalchemy.engine
        SQLalchemy engine object containing the connection details
    """
    # define default filepath if not provided
    if filepath is None:
        filepath = os.path.join(os.path.expanduser("~"), '.egoio', 'config.ini')
    # does the file exist?
    if not os.path.isfile(filepath):
        print('DB config file {file} not found. '
              'This might be the first run of the tool. '
              .format(file=filepath))
        cfg = create_oedb_config_file(filepath, section=section)
    else:
        cfg = readcfg(filepath, section)
    try:
        pw = cfg.get(section, "password")
    # narrowed from a bare except: only "no such option/section" means we
    # should fall back to the keyring; anything else should surface
    except (cp.NoSectionError, cp.NoOptionError):
        pw = keyring.get_password(section,
                                  cfg.get(section, "username"))
        if pw is None:
            pw = getpass.getpass(prompt='No password found for database "{db}". '
                                        'Enter your password to '
                                        'store it in keyring: '
                                        .format(db=cfg.get(section, 'database')))
            keyring.set_password(section, cfg.get(section, "username"), pw)
    # establish connection and return it
    conn = create_engine(
        "postgresql+{dialect}://{user}:{password}@{host}:{port}/{db}".format(
            dialect=cfg.get(section, 'dialect', fallback='psycopg2'),
            user=cfg.get(section, 'username'),
            password=pw,
            host=cfg.get(section, 'host'),
            port=cfg.get(section, 'port'),
            db=cfg.get(section, 'database')))
    return conn
def get_url_map():
    """
    Loads custom/pypi/map.txt and builds a dict where url_map[package_name] = url

    :return: dict, urls
    """
    # renamed from `map` — don't shadow the builtin
    url_map = {}
    path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),  # current working dir ../
        "custom",  # ../custom/
        "pypi",  # ../custom/pypi/
        "map.txt"  # ../custom/pypi/map.txt
    )
    with open(path) as f:
        # iterate the file directly instead of materializing readlines();
        # split only on the first ": " so URLs containing ": " survive
        for line in f:
            package, url = line.strip().split(": ", 1)
            url_map[package] = url
    return url_map
def get_urls(session, name, data, find_changelogs_fn, **kwargs):
    """Gets URLs to changelogs.

    :param session: requests Session instance
    :param name: str, package name
    :param data: dict, meta data
    :param find_changelogs_fn: function, find_changelogs
    :return: tuple, (set(changelog URLs), set(repo URLs))
    """
    # check if there's a changelog in ../custom/pypi/map.txt
    url_map = get_url_map()
    key = name.lower().replace("_", "-")
    if key in url_map:
        logger.info("Package {name}'s URL is in pypi/map.txt, returning".format(name=name))
        # return a set to honor the documented (set, set) contract; the
        # original returned a one-element list here while every other
        # return path yields sets
        return {url_map[key]}, set()
    # if this package has valid meta data, build up a list of URL candidates we can possibly
    # search for changelogs on
    if "info" in data:
        # add all URLs in pypi's meta data:
        # {
        #   "info": {
        #       "home_page":
        #       "docs_url":
        #       "bugtrack_url":
        #   }
        # }
        candidates = [
            url for url in
            [data["info"].get(attr) for attr in ("home_page", "docs_url", "bugtrack_url")]
            if url
        ]
        # the latest release page on pypi might also contain links, add it
        candidates.append("https://pypi.python.org/pypi/{name}/{latest_release}".format(
            name=name,
            latest_release=next(iter(get_releases(data)))
        ))
        # Check the download URL as well.
        if "download_url" in data:
            candidates.append(data["download_url"])
        # scrape the long description for anything that looks like a URL
        if data['info']['description']:
            candidates.extend(changelogs.url_re.findall(data["info"]["description"]))
        return find_changelogs_fn(session=session, name=name, candidates=candidates)
    return set(), set()
def validate_args(self):
    """Validates arguments."""
    from ..mixins import ModelMixin
    # all of these are mandatory — fail loudly on a missing one
    required = ("instance", "decider", "identifier", "fields", "default_language")
    for attr in required:
        if getattr(self, attr) is None:
            raise AttributeError("%s must not be None" % attr)
    if not isinstance(self.instance, (ModelMixin,)):
        raise ImproperlyConfigured('"instance" argument must be a Linguist model')
    if not issubclass(self.decider, (models.Model,)):
        raise ImproperlyConfigured(
            '"decider" argument must be a valid Django model'
        )
def active_language(self):
    """Returns active language."""
    # Explicit per-instance override set via activate_language()
    if self._language is not None:
        return self._language
    # Current site language (translation.get_language()),
    # falling back to the default language descriptor
    current = utils.get_language()
    return current if current in self.supported_languages else self.default_language
def translation_instances(self):
    """Returns translation instances."""
    instances = []
    # flatten {field: {language: instance}} into a single list
    for _field, by_language in six.iteritems(self.instance._linguist_translations):
        instances.extend(by_language.values())
    return instances
def get_cache(
    self,
    instance,
    translation=None,
    language=None,
    field_name=None,
    field_value=None,
):
    """Returns translation from cache.

    Looks the entry up in ``instance._linguist_translations``; on a miss for a
    saved instance, tries to load the translation from the decider model, and
    finally builds a fresh ``CachedTranslation`` and stores it in the cache.
    """
    is_new = bool(instance.pk is None)
    try:
        cached_obj = instance._linguist_translations[field_name][language]
        # Backfill attributes that may be missing on a partially built entry.
        if not cached_obj.field_name:
            cached_obj.field_name = field_name
        if not cached_obj.language:
            cached_obj.language = language
        if not cached_obj.identifier:
            cached_obj.identifier = self.instance.linguist_identifier
    except KeyError:
        cached_obj = None
    # Saved instance without an explicit translation: try the database.
    if not is_new and translation is None:
        try:
            translation = self.decider.objects.get(
                identifier=self.instance.linguist_identifier,
                object_id=self.instance.pk,
                language=language,
                field_name=field_name,
            )
        except self.decider.DoesNotExist:
            pass
    if cached_obj is None:
        if translation is not None:
            cached_obj = CachedTranslation.from_object(translation)
        else:
            cached_obj = CachedTranslation(
                instance=instance,
                language=language,
                field_name=field_name,
                field_value=field_value,
            )
        instance._linguist_translations[cached_obj.field_name][
            cached_obj.language
        ] = cached_obj
    return cached_obj
def set_cache(
    self,
    instance=None,
    translation=None,
    language=None,
    field_name=None,
    field_value=None,
):
    """Add a new translation into the cache."""
    # Fast path: a concrete translation object for a concrete instance.
    if instance is not None and translation is not None:
        entry = CachedTranslation.from_object(translation)
        instance._linguist_translations[translation.field_name][
            translation.language
        ] = entry
        return entry
    if instance is None:
        instance = self.instance
    entry = self.get_cache(
        instance,
        translation=translation,
        field_value=field_value,
        language=language,
        field_name=field_name,
    )
    # An explicit None marks an existing value as deleted.
    if field_value is None and entry.field_value:
        entry.deleted = True
    if field_value != entry.field_value:
        entry.has_changed = True
        entry.field_value = field_value
    return entry
def _filter_or_exclude(self, negate, *args, **kwargs):
    """Overrides default behavior to handle linguist fields."""
    from .models import Translation
    new_args = self.get_cleaned_args(args)
    new_kwargs = self.get_cleaned_kwargs(kwargs)
    translation_args = self.get_translation_args(args)
    translation_kwargs = self.get_translation_kwargs(kwargs)
    has_linguist_args = self.has_linguist_args(args)
    has_linguist_kwargs = self.has_linguist_kwargs(kwargs)
    if translation_args or translation_kwargs:
        # Resolve translated lookups against the Translation table first,
        # then narrow the main queryset to the matching object ids.
        ids = list(
            set(
                Translation.objects.filter(
                    *translation_args, **translation_kwargs
                ).values_list("object_id", flat=True)
            )
        )
        if ids:
            new_kwargs["id__in"] = ids
    only_linguist_kwargs = has_linguist_kwargs and not (new_kwargs or new_args)
    only_linguist_args = has_linguist_args and not (new_args or new_kwargs)
    # No translations but we looked for translations?
    # Returns empty queryset.
    if only_linguist_kwargs or only_linguist_args:
        return self._clone().none()
    return super(QuerySetMixin, self)._filter_or_exclude(
        negate, *new_args, **new_kwargs
    )
def _get_concrete_fields_with_model(self):
return [
(f, f.model if f.model != self.model else None)
for f in self.model._meta.get_fields()
if f.concrete
and (
not f.is_relation or f.one_to_one or (f.many_to_one and f.related_model)
)
] | For compatibility with Django<=1.10. Replace old
`_meta.get_concrete_fields_with_model`.
https://docs.djangoproject.com/en/1.10/ref/models/meta/ |
def linguist_field_names(self):
    """Returns linguist field names (example: "title" and "title_fr")."""
    base_fields = list(self.model._linguist.fields)
    localized_fields = list(utils.get_language_fields(self.model._linguist.fields))
    return base_fields + localized_fields
def has_linguist_kwargs(self, kwargs):
    """Parses the given kwargs and returns True if they contain
    linguist lookups."""
    return any(self.is_linguist_lookup(k) for k in kwargs)
def has_linguist_args(self, args):
    """Parses the given args and returns True if they contain
    linguist lookups."""
    matched = [
        condition
        for condition in (self._get_linguist_condition(arg) for arg in args)
        if condition
    ]
    return bool(matched)
def get_translation_args(self, args):
    """Returns linguist args from model args."""
    conditions = (self._get_linguist_condition(arg, transform=True) for arg in args)
    return [condition for condition in conditions if condition]
def get_translation_kwargs(self, kwargs):
    """Returns linguist lookup kwargs (related to Translation model)."""
    lookups = [
        utils.get_translation_lookup(self.model._linguist.identifier, k, v)
        for k, v in six.iteritems(kwargs)
        if self.is_linguist_lookup(k)
    ]
    # merge: first occurrence of a key wins
    translation_kwargs = {}
    for lookup in lookups:
        for k, v in six.iteritems(lookup):
            translation_kwargs.setdefault(k, v)
    return translation_kwargs
def is_linguist_lookup(self, lookup):
    """Returns true if the given lookup is a valid linguist lookup."""
    field = utils.get_field_name_from_lookup(lookup)
    # To keep default behavior with "FieldError: Cannot resolve keyword".
    return (
        field not in self.concrete_field_names
        and field in self.linguist_field_names
    )
def _get_linguist_condition(self, condition, reverse=False, transform=False):
    """Parses Q tree and returns linguist lookups or model lookups
    if reverse is True."""
    # We deal with a node
    if isinstance(condition, Q):
        kept_children = []
        for child in condition.children:
            parsed = self._get_linguist_condition(
                condition=child, reverse=reverse, transform=transform
            )
            if parsed is not None:
                # keep non-empty Q nodes and raw (field, value) tuples
                if (isinstance(parsed, Q) and parsed.children) or isinstance(
                    parsed, tuple
                ):
                    kept_children.append(parsed)
        pruned = copy.deepcopy(condition)
        pruned.children = kept_children
        return pruned
    # We are dealing with a lookup ('field', 'value').
    lookup, value = condition
    is_linguist = self.is_linguist_lookup(lookup)
    if transform and is_linguist:
        return Q(
            **utils.get_translation_lookup(
                self.model._linguist.identifier, lookup, value
            )
        )
    if (reverse and not is_linguist) or (not reverse and is_linguist):
        return condition
def get_cleaned_args(self, args):
    """Returns positional arguments for related model query."""
    if not args:
        return args
    conditions = (self._get_linguist_condition(arg, True) for arg in args)
    return [condition for condition in conditions if condition]
def get_cleaned_kwargs(self, kwargs):
    """Returns concrete field lookups (linguist lookups stripped)."""
    return {
        k: v for k, v in kwargs.items() if not self.is_linguist_lookup(k)
    }
def with_translations(self, **kwargs):
    """Prefetches translations.

    Takes three optional keyword arguments:

    * ``field_names``: ``field_name`` values for SELECT IN
    * ``languages``: ``language`` values for SELECT IN
    * ``chunks_length``: fetches IDs by chunk
    """
    force = kwargs.pop("force", False)
    # already prefetched and no explicit force: reuse this queryset
    if self._prefetch_translations_done and force is False:
        return self
    self._prefetched_translations_cache = utils.get_grouped_translations(
        self, **kwargs
    )
    self._prefetch_translations_done = True
    return self._clone()
def available_languages(self):
    """Returns available languages."""
    from .models import Translation
    qs = Translation.objects.filter(
        identifier=self.linguist_identifier, object_id=self.pk
    )
    return qs.values_list("language", flat=True).distinct().order_by("language")
def get_translations(self, language=None):
    """Returns available (saved) translations for this instance."""
    from .models import Translation
    # unsaved instance: nothing can be persisted yet
    if self.pk:
        return Translation.objects.get_translations(obj=self, language=language)
    return Translation.objects.none()
def delete_translations(self, language=None):
    """Deletes related translations (optionally restricted to one language)."""
    from .models import Translation
    return Translation.objects.delete_translations(obj=self, language=language)
def override_language(self, language):
    """Context manager to override the instance language.

    The previous language is restored in a ``finally`` block so that an
    exception raised inside the ``with`` body cannot leak the override
    (the original restored it only on the normal path).
    """
    previous_language = self._linguist.language
    self._linguist.language = language
    try:
        yield
    finally:
        self._linguist.language = previous_language
def _save_table(
    self,
    raw=False,
    cls=None,
    force_insert=False,
    force_update=False,
    using=None,
    update_fields=None,
):
    """Overwrites model's ``_save_table`` method to save translations after instance
    has been saved (required to retrieve the object ID for ``Translation``
    model).

    Preferred over overriding the object's ``save`` method
    to ensure that ``pre_save`` and ``post_save`` signals happen
    respectively before and after the translations have been saved to the database.
    Thus ``pre_save`` signals have access to the ``has_changed`` attribute on translated fields
    before the translations are saved and the attribute is reset.
    And ``post_save`` signals always have access to the updated translations.
    """
    updated = super(ModelMixin, self)._save_table(
        raw=raw,
        cls=cls,
        force_insert=force_insert,
        force_update=force_update,
        using=using,
        update_fields=update_fields,
    )
    # persist cached translations now that self.pk is guaranteed to exist
    self._linguist.decider.objects.save_translations([self])
    return updated
def validate_meta(meta):
    """Validates Linguist Meta attribute.

    :param meta: dict, the ``Meta.linguist`` declaration of a model
    :raises TypeError: if ``meta`` is not a dict
    :raises KeyError: if a required key is missing
    :raises ImproperlyConfigured: if ``fields`` is not a list/tuple
    """
    if not isinstance(meta, (dict,)):
        raise TypeError('Model Meta "linguist" must be a dict')
    required_keys = ("identifier", "fields")
    for key in required_keys:
        if key not in meta:
            # the original passed (msg, key) as two args to KeyError, so the
            # %s placeholder was never filled in; format the message properly
            raise KeyError('Model Meta "linguist" dict requires %s to be defined' % key)
    if not isinstance(meta["fields"], (list, tuple)):
        raise ImproperlyConfigured(
            "Linguist Meta's fields attribute must be a list or tuple"
        )
def default_value_getter(field):
    """When accessing to the name of the field itself, the value
    in the current language will be returned. Unless it's set,
    the value in the default language will be returned."""
    def default_value_func_getter(self):
        active_field = utils.build_localized_field_name(
            field, self._linguist.active_language
        )
        value = getattr(self, active_field)
        if value:
            return value
        # empty/falsy in the active language: fall back to the default one
        fallback_field = utils.build_localized_field_name(field, self.default_language)
        return getattr(self, fallback_field)
    return default_value_func_getter
def default_value_setter(field):
    """When setting to the name of the field itself, the value
    in the current language will be set."""
    def default_value_func_setter(self, value):
        active_field = utils.build_localized_field_name(
            field, self._linguist.active_language
        )
        setattr(self, active_field, value)
    return default_value_func_setter
def field_factory(base_class):
    """Takes a field base class and wrap it with ``TranslationField`` class."""
    from .fields import TranslationField
    # build the subclass dynamically with the final name baked in
    return type(
        "Translation%s" % base_class.__name__,
        (TranslationField, base_class),
        {},
    )
def create_translation_field(translated_field, language):
    """Takes the original field, a given language, a decider model and return a
    Field class for model."""
    field_class = translated_field.__class__
    cls_name = field_class.__name__
    if not isinstance(translated_field, tuple(SUPPORTED_FIELDS.keys())):
        raise ImproperlyConfigured("%s is not supported by Linguist." % cls_name)
    translation_class = field_factory(field_class)
    kwargs = get_translation_class_kwargs(field_class)
    return translation_class(
        translated_field=translated_field, language=language, **kwargs
    )
def connect(self):
    '''
    Connect to the drone.

    :raises RuntimeError: if the drone is connected or closed already.
    '''
    # reconstructed: the source was corrupted by a duplicated paste
    if self.connected:
        raise RuntimeError(
            '{} is connected already'.format(self.__class__.__name__))
    if self.closed:
        raise RuntimeError(
            '{} is closed already'.format(self.__class__.__name__))
    self.connected = True
    self._connect()
def close(self):
    '''
    Exit all threads and disconnect the drone.

    This method has no effect if the drone is closed already or not
    connected yet.
    '''
    # reconstructed: the source was corrupted by a duplicated paste
    if not self.connected:
        return
    if self.closed:
        return
    self.closed = True
    self._close()
def _set_flags(self, **flags):
'''
Set the flags of this argument.
Example: ``int_param._set_flags(a=1, b=2, c=4, d=8)``
'''
self._flags = enum.IntEnum('_flags', flags)
self.__dict__.update(self._flags.__members__)
self._patch_flag_doc(f _set_flags(self, **flags):
'''
Set the flags of this argument.
Example: ``int_param._set_flags(a=1, b=2, c=4, d=8)``
'''
self._flags = enum.IntEnum('_flags', flags)
self.__dict__.update(self._flags.__members__)
self._patch_flag_doc() | Set the flags of this argument.
Example: ``int_param._set_flags(a=1, b=2, c=4, d=8)`` |
def delete_translations(sender, instance, **kwargs):
    """Deletes related instance's translations when instance is deleted."""
    if not issubclass(sender, (ModelMixin,)):
        return
    instance._linguist.decider.objects.filter(
        identifier=instance.linguist_identifier, object_id=instance.pk
    ).delete()
def draw_tree(node,
              child_iter=lambda n: n.children,
              text_str=str):
    """Support asciitree 0.2 API.

    This function solely exist to not break old code (using asciitree 0.2).
    Its use is deprecated."""
    traversal = Traversal(get_text=text_str, get_children=child_iter)
    renderer = LeftAligned(traverse=traversal, draw=LegacyStyle())
    return renderer(node)
def render(self, node):
    """Renders a node. This function is used internally, as it returns
    a list of lines. Use :func:`~asciitree.LeftAligned.__call__` instead."""
    children = self.traverse.get_children(node)
    lines = [self.draw.node_label(self.traverse.get_text(node))]
    last_index = len(children) - 1
    for n, child in enumerate(children):
        child_tree = self.render(child)
        head = child_tree.pop(0)
        if n == last_index:
            # last child does not get the line drawn
            lines.append(self.draw.last_child_head(head))
            lines.extend(self.draw.last_child_tail(line) for line in child_tree)
        else:
            lines.append(self.draw.child_head(head))
            lines.extend(self.draw.child_tail(line) for line in child_tree)
    return lines
def get_language():
    """Returns an active language code that is guaranteed to be in
    settings.SUPPORTED_LANGUAGES."""
    lang = _get_language()
    if not lang:
        return get_fallback_language()
    supported = [pair[0] for pair in settings.SUPPORTED_LANGUAGES]
    if lang not in supported and "-" in lang:
        # try the base language of a regional code, e.g. "fr-ca" -> "fr"
        lang = lang.split("-")[0]
    return lang if lang in supported else settings.DEFAULT_LANGUAGE
def activate_language(instances, language):
    """Activates the given language for the given instances."""
    # unsupported languages silently fall back to the default one
    if language not in get_supported_languages():
        language = get_fallback_language()
    for instance in instances:
        instance.activate_language(language)
def load_class(class_path, setting_name=None):
    """Loads a class given a class_path. The setting value may be a string or a
    tuple. The setting_name parameter is only there for pretty error output, and
    therefore is optional.

    :param class_path: str dotted path, or (dotted_path, app_label) tuple
    :param setting_name: str, setting name used only in error messages
    :raises ImproperlyConfigured: on any malformed path or failed import
    """
    if not isinstance(class_path, six.string_types):
        try:
            # a non-string setting is expected to be a 2-tuple
            class_path, app_label = class_path
        # narrowed from a bare except: only unpacking failures are expected
        except (TypeError, ValueError):
            if setting_name:
                raise exceptions.ImproperlyConfigured(
                    CLASS_PATH_ERROR % (setting_name, setting_name)
                )
            else:
                raise exceptions.ImproperlyConfigured(
                    CLASS_PATH_ERROR % ("this setting", "It")
                )
    try:
        class_module, class_name = class_path.rsplit(".", 1)
    except ValueError:
        if setting_name:
            txt = "%s isn't a valid module. Check your %s setting" % (
                class_path,
                setting_name,
            )
        else:
            txt = "%s isn't a valid module." % class_path
        raise exceptions.ImproperlyConfigured(txt)
    try:
        mod = import_module(class_module)
    except ImportError as e:
        if setting_name:
            txt = 'Error importing backend %s: "%s". Check your %s setting' % (
                class_module,
                e,
                setting_name,
            )
        else:
            txt = 'Error importing backend %s: "%s".' % (class_module, e)
        raise exceptions.ImproperlyConfigured(txt)
    try:
        clazz = getattr(mod, class_name)
    except AttributeError:
        if setting_name:
            txt = (
                'Backend module "%s" does not define a "%s" class. Check'
                " your %s setting" % (class_module, class_name, setting_name)
            )
        else:
            txt = 'Backend module "%s" does not define a "%s" class.' % (
                class_module,
                class_name,
            )
        raise exceptions.ImproperlyConfigured(txt)
    return clazz
def get_model_string(model_name):
    """Returns the model string notation Django uses for lazily loaded ForeignKeys
    (eg 'auth.User') to prevent circular imports.

    This is needed to allow our crazy custom model usage.
    """
    setting_name = "LINGUIST_%s_MODEL" % model_name.upper().replace("_", "")
    class_path = getattr(settings, setting_name, None)
    if not class_path:
        return "linguist.%s" % model_name
    # "basestring" in the original is Python 2 only and raises NameError on
    # Python 3; six.string_types (already used by this package) covers both.
    elif isinstance(class_path, six.string_types):
        parts = class_path.split(".")
        try:
            index = parts.index("models") - 1
        except ValueError:
            raise exceptions.ImproperlyConfigured(
                CLASS_PATH_ERROR % (setting_name, setting_name)
            )
        app_label, model_name = parts[index], parts[-1]
    else:
        try:
            class_path, app_label = class_path
            model_name = class_path.split(".")[-1]
        # narrowed from a bare except: unpack or non-string path failures
        except (TypeError, ValueError, AttributeError):
            raise exceptions.ImproperlyConfigured(
                CLASS_PATH_ERROR % (setting_name, setting_name)
            )
    return "%s.%s" % (app_label, model_name)
def get_translation_lookup(identifier, field, value):
    """Mapper that takes a language field, its value and returns the
    related lookup for Translation model."""
    # "title_fr__contains" -> field part "title_fr", transformers ["contains"]
    parts = field.split("__")
    transformers = parts[1:] if len(parts) > 1 else None
    # defaults: the bare field name in the fallback language
    field_name = parts[0]
    language = get_fallback_language()
    name_parts = parts[0].split("_")
    if len(name_parts) > 1:
        supported_languages = get_supported_languages()
        last_part = name_parts[-1]
        if last_part in supported_languages:
            # e.g. "my_title_fr": strip the trailing language code
            field_name = "_".join(name_parts[:-1])
            language = last_part
        else:
            # e.g. "my_title": keep the full name, use the default language
            field_name = "_".join(name_parts)
    if transformers is None:
        value_lookup = "field_value"
    else:
        value_lookup = "field_value__%s" % "__".join(transformers)
    lookup = {"field_name": field_name, "identifier": identifier, "language": language}
    lookup[value_lookup] = value
    return lookup
def get_grouped_translations(instances, **kwargs):
    """Takes instances and returns grouped translations ready to
    be set in cache.

    :param instances: a model instance, list of instances, or QuerySet
    :return: defaultdict mapping object_id -> list of translation objects
    """
    grouped_translations = collections.defaultdict(list)
    if not instances:
        return grouped_translations
    # collections.Iterable was removed in Python 3.10; prefer collections.abc
    # while staying compatible with older interpreters
    iterable_abc = getattr(collections, "abc", collections)
    if not isinstance(instances, iterable_abc.Iterable):
        instances = [instances]
    if isinstance(instances, QuerySet):
        model = instances.model
    else:
        model = instances[0]._meta.model
    instances_ids = []
    for instance in instances:
        instances_ids.append(instance.pk)
        # all instances must share the same model
        if instance._meta.model != model:
            raise Exception(
                "You cannot use different model instances, only one authorized."
            )
    from .models import Translation
    decider = model._meta.linguist.get("decider", Translation)
    identifier = model._meta.linguist.get("identifier", None)
    chunks_length = kwargs.get("chunks_length", None)
    if identifier is None:
        raise Exception('You must define Linguist "identifier" meta option')
    lookup = dict(identifier=identifier)
    # optional narrowing by field names / languages (scalar or list)
    for kwarg in ("field_names", "languages"):
        value = kwargs.get(kwarg, None)
        if value is not None:
            if not isinstance(value, (list, tuple)):
                value = [value]
            lookup["%s__in" % kwarg[:-1]] = value
    if chunks_length is not None:
        # query in chunks to keep the IN (...) clauses bounded
        translations_qs = []
        for ids in utils.chunks(instances_ids, chunks_length):
            ids_lookup = copy.copy(lookup)
            ids_lookup["object_id__in"] = ids
            translations_qs.append(decider.objects.filter(**ids_lookup))
        translations = itertools.chain.from_iterable(translations_qs)
    else:
        lookup["object_id__in"] = instances_ids
        translations = decider.objects.filter(**lookup)
    for translation in translations:
        grouped_translations[translation.object_id].append(translation)
    return grouped_translations
def every(secs):
    '''
    Generator that yields for every *secs* seconds.

    Example:

    >>> for _ in every(0.1):
    ...     print('Hello')

    You get ``Hello`` output every 0.1 seconds.
    '''
    # reconstructed: the source was corrupted by a duplicated paste;
    # also renamed the "time_stated" typo
    time_started = time.monotonic()
    while True:
        time_yielded = time.monotonic()
        # yield the elapsed time since the generator started
        yield time_yielded - time_started
        # sleep for whatever remains of the *secs* window, never negative
        time.sleep(max(0, secs + time_yielded - time.monotonic()))
def get_free_udp_port():
    '''
    Get a free UDP port.

    Note this is vulnerable to race conditions.
    '''
    import socket
    # SOCK_DGRAM: the original asked for SOCK_STREAM, which reserves a
    # free *TCP* port, not a UDP one as the function name promises
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind(('localhost', 0))
    addr = sock.getsockname()
    sock.close()
    return addr[1]
def get_available_languages(self, obj):
    """Returns available languages for current object."""
    if obj is None:
        # no object (add form): empty queryset
        return self.model.objects.none()
    return obj.available_languages
def languages_column(self, obj):
    """Adds languages columns."""
    joined = " ".join(self.get_available_languages(obj))
    return '<span class="available-languages">{0}</span>'.format(joined)
def prefetch_translations(instances, **kwargs):
    """Prefetches translations for the given instances.

    Can be useful for a list of instances.
    """
    from .mixins import ModelMixin
    # collections.Iterable was removed in Python 3.10; prefer collections.abc
    # while staying compatible with older interpreters
    iterable_abc = getattr(collections, "abc", collections)
    if not isinstance(instances, iterable_abc.Iterable):
        instances = [instances]
    populate_missing = kwargs.get("populate_missing", True)
    grouped_translations = utils.get_grouped_translations(instances, **kwargs)
    # In the case of no translations objects
    if not grouped_translations and populate_missing:
        for instance in instances:
            instance.populate_missing_translations()
    for instance in instances:
        if (
            issubclass(instance.__class__, ModelMixin)
            and instance.pk in grouped_translations
        ):
            for translation in grouped_translations[instance.pk]:
                instance._linguist.set_cache(instance=instance, translation=translation)
            if populate_missing:
                instance.populate_missing_translations()
def get_translations(self, obj, language=None):
    """Shorcut method to retrieve translations for a given object."""
    lookup = {"identifier": obj.linguist_identifier, "object_id": obj.pk}
    if language is not None:
        # restrict to a single language when asked
        lookup["language"] = language
    return self.get_queryset().filter(**lookup)
def save_translations(self, instances):
    """
    Saves cached translations (cached in model instances as dictionaries).

    For each instance, pending translation objects are partitioned into
    creations, updates and deletions, then flushed: bulk create inside a
    transaction, one UPDATE per changed object, one DELETE per removed one.
    """
    if not isinstance(instances, (list, tuple)):
        instances = [instances]
    for instance in instances:
        translations = []
        for obj in instance._linguist.translation_instances:
            if obj.field_name:
                obj.object_id = instance.pk
                # Only flush translations that are new with a value, or
                # existing ones whose value actually changed.
                if (obj.is_new and obj.field_value) or (
                    obj.has_changed and not obj.is_new
                ):
                    field = instance.get_field_object(obj.field_name, obj.language)
                    # Honor custom field pre_save hooks (e.g. auto-now dates),
                    # mirroring what Django does on a regular save.
                    if hasattr(field, "pre_save") and callable(field.pre_save):
                        obj.field_value = field.pre_save(instance, True)
                    translations.append(obj)
        to_create = [
            (obj, self.model(**obj.attrs))
            for obj in translations
            if obj.is_new and obj.field_value
        ]
        to_update = [
            obj for obj in translations if obj.has_changed and not obj.is_new
        ]
        to_delete = [obj for obj in translations if obj.deleted]
        created = True
        if to_create:
            objects = [obj for cached, obj in to_create]
            try:
                with transaction.atomic():
                    self.bulk_create(objects)
            except IntegrityError:
                # NOTE(review): presumably rows already exist (e.g. a
                # concurrent save); cache entries stay marked as new so a
                # later save can retry — confirm against callers.
                created = False
        if to_update:
            for obj in to_update:
                self.filter(**obj.lookup).update(field_value=obj.field_value)
                obj.has_changed = False
        if created:
            # Mark cached entries as persisted only when the bulk create
            # actually succeeded.
            for cached, obj in to_create:
                cached.is_new = False
                cached.has_changed = False
        if to_delete:
            for obj in to_delete:
                self.filter(**obj.lookup).delete()
                obj.has_changed = False
def _pack(self, seq='SEQUNSET'):
'''
Packs the command into *bytes*
:param seq: sequence number
:rtype: bytes
'''
return 'AT*{clsname}={seq}{argl_wc}\r'.format(
clsname=type(self).__name__,
seq=seq,
argl_wc=b''.join(self._iter_packed_with_comma()).decode()
).encode(f _pack(self, seq='SEQUNSET'):
'''
Packs the command into *bytes*
:param seq: sequence number
:rtype: bytes
'''
return 'AT*{clsname}={seq}{argl_wc}\r'.format(
clsname=type(self).__name__,
seq=seq,
argl_wc=b''.join(self._iter_packed_with_comma()).decode()
).encode() | Packs the command into *bytes*
:param seq: sequence number
:rtype: bytes |
def takeoff(self):
    '''
    Sends the takeoff command.
    '''
    self.send(at.REF(at.REF.input.start))
def emergency(self):
    '''
    Sends the emergency command.
    '''
    self.send(at.REF(at.REF.input.select))
def _move(self, roll=0, pitch=0, gaz=0, yaw=0):
    '''
    Same as sending :py:class:`~pyardrone.at.PCMD` command with progressive
    flag.
    '''
    self.send(at.PCMD(at.PCMD.flag.progressive, roll, pitch, gaz, yaw))
def encode(number, checksum=False, split=0):
    """
    Encode an integer into a symbol string.

    A ValueError is raised on invalid input.

    If checksum is set to True, a check symbol will be calculated and
    appended to the string.

    If split is specified, the string will be divided into clusters of
    that size separated by hyphens.

    The encoded string is returned.
    """
    number = int(number)
    if number < 0:
        raise ValueError("number '%d' is not a positive integer" % number)
    split = int(split)
    if split < 0:
        raise ValueError("split '%d' is not a positive integer" % split)
    check_symbol = encode_symbols[number % check_base] if checksum else ''
    if number == 0:
        return '0' + check_symbol
    # Collect digits least-significant first, then reverse.
    digits = []
    while number > 0:
        number, remainder = divmod(number, base)
        digits.append(encode_symbols[remainder])
    encoded = ''.join(reversed(digits)) + check_symbol
    if split:
        encoded = '-'.join(
            encoded[pos:pos + split] for pos in range(0, len(encoded), split)
        )
    return encoded
def decode(symbol_string, checksum=False, strict=False):
    """
    Decode an encoded symbol string.

    If checksum is set to True, the string is assumed to have a trailing
    check symbol which will be validated. If the checksum validation fails,
    a ValueError is raised.

    If strict is set to True, a ValueError is raised if the normalization
    step requires changes to the string.

    The decoded number is returned.
    """
    symbol_string = normalize(symbol_string, strict=strict)
    check_symbol = None
    if checksum:
        check_symbol = symbol_string[-1]
        symbol_string = symbol_string[:-1]
    number = 0
    for symbol in symbol_string:
        number = number * base + decode_symbols[symbol]
    if checksum and decode_symbols[check_symbol] != number % check_base:
        raise ValueError("invalid check symbol '%s' for string '%s'" %
                         (check_symbol, symbol_string))
    return number
def normalize(symbol_string, strict=False):
    """
    Normalize an encoded symbol string.

    Normalization provides error correction and prepares the string for
    decoding. These transformations are applied:

    1. Hyphens are removed
    2. 'I', 'i', 'L' or 'l' are converted to '1'
    3. 'O' or 'o' are converted to '0'
    4. All characters are converted to uppercase

    A TypeError is raised if an invalid string type is provided.

    A ValueError is raised if the normalized string contains invalid
    characters. If the strict parameter is set to True, a ValueError is
    raised if any of the above transformations are applied.

    The normalized string is returned.
    """
    if not isinstance(symbol_string, string_types):
        raise TypeError("string is of invalid type %s" %
                        symbol_string.__class__.__name__)
    if not PY3:
        # On Python 2, work on a byte string so translate() behaves.
        try:
            symbol_string = symbol_string.encode('ascii')
        except UnicodeEncodeError:
            raise ValueError("string should only contain ASCII characters")
    norm_string = symbol_string.replace('-', '').translate(normalize_symbols).upper()
    if not valid_symbols.match(norm_string):
        raise ValueError("string '%s' contains invalid characters" % norm_string)
    if strict and norm_string != symbol_string:
        raise ValueError("string '%s' requires normalization" % symbol_string)
    return norm_string
def _get_translation_field_names():
    """Return Translation base model field names (the "id" field excepted)."""
    from .models import Translation
    return [field.name
            for field in Translation._meta.get_fields()
            if field.name != "id"]
def send(self, command, *, log=True):
    '''
    :param pyardrone.at.base.ATCommand command: command to send

    Sends the command to the drone,
    with an internal increasing sequence number.

    this method is thread-safe.
    '''
    # The mutex guarantees that sequence numbers are allocated and sent
    # strictly in order, even across threads.
    with self.sequence_number_mutex:
        self.sequence_number += 1
        packed = command._pack(self.sequence_number)
        self.send_bytes(packed, log=log)
def setup_requires():
    """
    Return required packages.

    Plus any version tests and warnings.
    """
    from pkg_resources import parse_version
    required = ['cython>=0.24.0']
    numpy_requirement = 'numpy>=1.7.1'
    try:
        import numpy
    except Exception:
        # numpy missing entirely: require it.
        required.append(numpy_requirement)
        return required
    if parse_version(numpy.__version__) < parse_version('1.7.1'):
        # numpy installed but too old: require an upgrade.
        required.append(numpy_requirement)
    return required
def _build_block_context(template, context):
    """
    Populate the block context with BlockNodes from parent templates.

    Returns the compiled parent template when ``template`` extends another
    template, otherwise ``None``.
    """
    # Ensure there's a BlockContext before rendering. This allows blocks in
    # ExtendsNodes to be found by sub-templates (allowing {{ block.super }} and
    # overriding sub-blocks to work).
    if BLOCK_CONTEXT_KEY not in context.render_context:
        context.render_context[BLOCK_CONTEXT_KEY] = BlockContext()
    block_context = context.render_context[BLOCK_CONTEXT_KEY]

    for node in template.nodelist:
        if isinstance(node, ExtendsNode):
            compiled_parent = node.get_parent(context)

            # Add the parent node's blocks to the context. (This ends up being
            # similar logic to ExtendsNode.render(), where we're adding the
            # parent's blocks to the context so a child can find them.)
            block_context.add_blocks(
                {n.name: n for n in compiled_parent.nodelist.get_nodes_by_type(BlockNode)})

            # Recurse upward so grandparent blocks are registered too.
            _build_block_context(compiled_parent, context)
            return compiled_parent

        # The ExtendsNode has to be the first non-text node.
        if not isinstance(node, TextNode):
            break
def _render_template_block_nodelist(nodelist, block_name, context):
    """
    Recursively iterate over a node to find the wanted block.

    :raises BlockNotFound: if ``block_name`` is not present in ``nodelist``
        or any of its child nodelists.
    """
    # Attempt to find the wanted block in the current template.
    for node in nodelist:
        # If the wanted block was found, return it.
        if isinstance(node, BlockNode):
            # No matter what, add this block to the rendering context.
            context.render_context[BLOCK_CONTEXT_KEY].push(node.name, node)

            # If the name matches, you're all set and we found the block!
            if node.name == block_name:
                return node.render(context)

        # If a node has children, recurse into them. Based on
        # django.template.base.Node.get_nodes_by_type.
        for attr in node.child_nodelists:
            try:
                new_nodelist = getattr(node, attr)
            except AttributeError:
                continue

            # Try to find the block recursively.
            try:
                return _render_template_block_nodelist(new_nodelist, block_name, context)
            except BlockNotFound:
                # Not in this subtree; keep scanning siblings.
                continue

    # The wanted block_name was not found.
    raise BlockNotFound("block with name '%s' does not exist" % block_name)
def render_block_to_string(template_name, block_name, context=None):
    """
    Loads the given template_name and renders the given block with the given
    dictionary as context. Returns a string.

    template_name
        The name of the template to load and render. If it's a list of
        template names, Django uses select_template() instead of
        get_template() to find the template.
    """
    # template_name may be a single name or a list of candidates.
    if isinstance(template_name, (tuple, list)):
        template = loader.select_template(template_name)
    else:
        template = loader.get_template(template_name)

    context = context or {}

    # Dispatch on the template backend.
    if isinstance(template, DjangoTemplate):
        return django_render_block(template, block_name, context)
    if isinstance(template, Jinja2Template):
        from render_block.jinja2 import jinja2_render_block
        return jinja2_render_block(template, block_name, context)
    raise UnsupportedEngine(
        'Can only render blocks from the Django template backend.')
def get_host_path(root, path, instance=None):
    """
    Generates the host path for a container volume. If the given path is a
    dictionary, uses the entry of the instance name.

    :param root: Root path to prepend, if ``path`` does not already describe an absolute path.
    :type root: unicode | str | AbstractLazyObject
    :param path: Path string or dictionary of per-instance paths.
    :type path: unicode | str | dict | AbstractLazyObject
    :param instance: Optional instance name.
    :type instance: unicode | str
    :return: Path on the host that is mapped to the container volume.
    :rtype: unicode | str
    """
    resolved = resolve_value(path)
    if isinstance(resolved, dict):
        instance_name = instance or 'default'
        host_path = resolve_value(resolved.get(instance_name))
        if not host_path:
            raise ValueError("No path defined for instance {0}.".format(instance_name))
    else:
        host_path = resolved
    resolved_root = resolve_value(root)
    # Only prepend the root to relative paths.
    if host_path and resolved_root and host_path[0] != posixpath.sep:
        return posixpath.join(resolved_root, host_path)
    return host_path
def from_client(cls, client):
    """
    Constructs a configuration object from an existing client instance. If the
    client has already been created with a configuration object, returns that
    instance.

    :param client: Client object to derive the configuration from.
    :type client: docker.client.Client
    :return: ClientConfiguration
    """
    if hasattr(client, 'client_configuration'):
        return client.client_configuration
    kwargs = {'client': client}
    kwargs.update(
        (attr, getattr(client, attr))
        for attr in cls.init_kwargs
        if hasattr(client, attr)
    )
    if hasattr(client, 'api_version'):
        kwargs['version'] = client.api_version
    return cls(**kwargs)
def get_init_kwargs(self):
    """
    Generates keyword arguments for creating a new Docker client instance.

    :return: Keyword arguments as defined through this configuration.
    :rtype: dict
    """
    kwargs = {}
    for key in self.init_kwargs:
        # Core properties come from attributes; everything else from the
        # dict-like configuration storage.
        if key in self.core_property_set:
            kwargs[key] = getattr(self, key)
        elif key in self:
            kwargs[key] = self[key]
    return kwargs
def get_client(self):
    """
    Retrieves or creates a client instance from this configuration object. If
    instantiated from this configuration, the resulting object is also cached
    in the property ``client`` and a reference to this configuration is stored
    on the client object.

    :return: Client object instance.
    :rtype: docker.client.Client
    """
    client = self._client
    if not client:
        client = self.client_constructor(**self.get_init_kwargs())
        self._client = client
        client.client_configuration = self
        # Client might update the version number after construction.
        updated_version = getattr(client, 'api_version', None)
        if updated_version:
            self.version = updated_version
    return client
def exec_commands(self, action, c_name, run_cmds, **kwargs):
    """
    Runs a single command inside a container.

    :param action: Action configuration.
    :type action: dockermap.map.runner.ActionConfig
    :param c_name: Container name.
    :type c_name: unicode | str
    :param run_cmds: Commands to run.
    :type run_cmds: list[dockermap.map.input.ExecCommand]
    :return: List of exec command return values (e.g. containing the command id), if applicable, or ``None``
      if either no commands have been run or no values have been returned from the API.
    :rtype: list[dict] | NoneType
    """
    client = action.client
    exec_results = []
    for run_cmd in run_cmds:
        cmd = run_cmd.cmd
        cmd_user = run_cmd.user
        log.debug("Creating exec command in container %s with user %s: %s.", c_name, cmd_user, cmd)
        ec_kwargs = self.get_exec_create_kwargs(action, c_name, cmd, cmd_user)
        create_result = client.exec_create(**ec_kwargs)
        if create_result:
            # The API returned an id; start the created exec instance.
            e_id = create_result['Id']
            log.debug("Starting exec command with id %s.", e_id)
            es_kwargs = self.get_exec_start_kwargs(action, c_name, e_id)
            client.exec_start(**es_kwargs)
            exec_results.append(create_result)
        else:
            log.debug("Exec command was created, but did not return an id. Assuming that it has been started.")
    if exec_results:
        return exec_results
    return None
def exec_container_commands(self, action, c_name, **kwargs):
    """
    Runs all configured commands of a container configuration inside the
    container instance.

    :param action: Action configuration.
    :type action: dockermap.map.runner.ActionConfig
    :param c_name: Container name.
    :type c_name: unicode | str
    :return: List of exec command return values (e.g. containing the command
      id), if applicable, or ``None`` if either no commands have been run or
      no values have been returned from the API.
    :rtype: list[dict] | NoneType
    """
    configured_commands = action.config.exec_commands
    if configured_commands:
        return self.exec_commands(action, c_name, run_cmds=configured_commands)
    return None
def prepare_path(path, replace_space, replace_sep, expandvars, expanduser):
    """
    Performs `os.path` replacement operations on a path string.

    :param path: Path string.
    :type path: unicode | str
    :param replace_space: Mask spaces with backslash.
    :param replace_sep: Replace potentially different path separators with
      POSIX path notation (use :const:`posixpath.sep`).
    :type replace_sep: bool
    :param expandvars: Expand environment variables (:func:`~os.path.expandvars`).
    :type expandvars: bool
    :param expanduser: Expand user variables (:func:`~os.path.expanduser`).
    :type expanduser: bool
    :return: Path string from `path` with aforementioned replacements.
    :rtype: unicode | str
    """
    # Transformations applied in order; each is gated by its flag.
    transforms = (
        (expandvars, os.path.expandvars),
        (expanduser, os.path.expanduser),
        (replace_sep and os.sep != posixpath.sep,
         lambda p: p.replace(os.path.sep, posixpath.sep)),
        (replace_space, lambda p: p.replace(' ', '\\ ')),
    )
    result = path
    for enabled, transform in transforms:
        if enabled:
            result = transform(result)
    return result
def format_command(cmd, shell=False):
    """
    Converts a command line to the notation as used in a Dockerfile ``CMD`` and
    ``ENTRYPOINT`` command. In shell notation, this returns a simple string,
    whereas by default it returns a JSON-list format with the command and
    arguments.

    :param cmd: Command line as a string or tuple.
    :type cmd: unicode | str | tuple | list
    :param shell: Use the notation so that Docker runs the command in a shell.
      Default is ``False``.
    :type shell: bool
    :return: The command string.
    :rtype: unicode | str
    """
    def _split_cmd():
        # Re-joins space-separated parts that were escaped with a trailing
        # backslash, yielding one logical argument at a time.
        line = None
        for part in cmd.split(' '):
            line = part if line is None else '{0} {1}'.format(line, part)
            # endswith is also safe for empty parts produced by consecutive
            # spaces (part[-1] would raise IndexError).
            if not part.endswith('\\'):
                yield line
                line = None
        if line is not None:
            yield line

    if cmd in ([], ''):
        return '[]'
    if shell:
        if isinstance(cmd, (list, tuple)):
            return ' '.join(cmd)
        elif isinstance(cmd, six.string_types):
            return cmd
    else:
        if isinstance(cmd, (list, tuple)):
            # json.dumps cannot serialize a lazy ``map`` object on Python 3;
            # build a concrete list of strings instead.
            return json.dumps([six.text_type(item) for item in cmd])
        elif isinstance(cmd, six.string_types):
            return json.dumps(list(_split_cmd()))
    raise ValueError("Invalid type of command string or sequence: {0}".format(cmd))
def format_expose(expose):
    """
    Converts a port number or multiple port numbers, as used in the Dockerfile
    ``EXPOSE`` command, to a tuple.

    :param expose: Port numbers, can be as integer, string, or a list/tuple of those.
    :type expose: int | unicode | str | list | tuple
    :return: A tuple, to be separated by spaces before inserting in a Dockerfile.
    :rtype: tuple
    """
    if isinstance(expose, six.string_types):
        return expose,
    # ``collections.Iterable`` was removed in Python 3.10; look it up via
    # ``collections.abc`` where available.
    abc_mod = getattr(collections, 'abc', collections)
    if isinstance(expose, abc_mod.Iterable):
        # Return a concrete tuple as documented; a lazy ``map`` object would
        # be exhausted after a single iteration on Python 3.
        return tuple(six.text_type(port) for port in expose)
    return six.text_type(expose),
def prefix(self, prefix='#', *args):
    """
    Prefix one or multiple arguments with a Dockerfile command. The default is
    ``#``, for comments. Multiple args will be separated by a space.

    :param prefix: Dockerfile command to use, e.g. ``ENV`` or ``RUN``.
    :type prefix: unicode | str
    :param args: Arguments to be prefixed.
    """
    self.write(prefix)
    if not args:
        # No arguments: the bare command is written without a newline.
        return
    self.write(' ')
    self.writeline(' '.join(map(six.text_type, args)))
def prefix_all(self, prefix='#', *lines):
    """
    Same as :func:`~prefix`, for multiple lines.

    :param prefix: Dockerfile command to use, e.g. ``ENV`` or ``RUN``.
    :type prefix: unicode | str
    :param lines: Lines with arguments to be prefixed.
    :type lines: collections.Iterable[unicode | str]
    """
    for entry in lines:
        if isinstance(entry, (tuple, list)):
            self.prefix(prefix, *entry)
            continue
        if entry:
            self.prefix(prefix, entry)
        else:
            # Falsy entries (e.g. '') produce a blank separator line.
            self.blank()
def add_archive(self, src_file, remove_final=False):
    """
    Adds the contents of another tarfile to the build. It will be repackaged
    during context generation, and added to the root level of the file system.
    Therefore, it is not required that tar (or compression utilities) is
    present in the base image.

    :param src_file: Tar archive to add.
    :type src_file: unicode | str
    :param remove_final: Remove the contents after the build operation has
      completed. Note that this will remove all top-level components of the
      tar archive recursively; do not use this on standard unix folders. It
      also does not reduce the size of the resulting image unless the image
      is squashed.
    :type remove_final: bool
    :return: Name of the root files / directories added to the Dockerfile.
    :rtype: list[unicode | str]
    """
    with tarfile.open(src_file, 'r') as tf:
        # Only top-level members (no path separator in the name).
        root_members = [member.name
                        for member in tf.getmembers()
                        if posixpath.sep not in member.name]
    self.prefix_all('ADD', *zip(root_members, root_members))
    if remove_final:
        self._remove_files.update(root_members)
    self._archives.append(src_file)
    return root_members
def add_volume(self, path):
    """
    Add a shared volume (i.e. with the ``VOLUME`` command). Not actually
    written until finalized.

    :param path: Path to the shared volume.
    """
    self.check_not_finalized()
    if self.volumes is None:
        # Lazily create the list on first use.
        self.volumes = [path]
        return
    self.volumes.append(path)
def write(self, input_str):
    """
    Adds content to the Dockerfile.

    :param input_str: Content.
    :type input_str: unicode | str
    """
    self.check_not_finalized()
    # The underlying buffer expects bytes; encode text as UTF-8.
    if isinstance(input_str, six.binary_type):
        data = input_str
    else:
        data = input_str.encode('utf-8')
    self.fileobj.write(data)
def finalize(self):
    """
    Finalizes the Dockerfile. Before the buffer is practically marked as
    read-only, the following Dockerfile commands are written:

    * ``RUN rm -R`` on each file marked for automatic removal;
    * ``VOLUME`` for shared volumes;
    * ``USER`` as the default user for following commands;
    * ``WORKDIR`` as the working directory for following commands;
    * ``SHELL`` if the default shell is to be changed;
    * ``ENTRYPOINT`` and ``CMD``, each formatted as a shell or exec command
      according to :attr:`command_shell`;
    * ``EXPOSE`` for exposed ports;
    * ``LABEL``, ``STOPSIGNAL``, and ``HEALTHCHECK`` instructions for the image.

    An attempt to finalize an already-finalized instance has no effect.
    """
    if self._finalized:
        return
    if self._remove_files:
        # Clean up files that were only needed during the build.
        for filename in self._remove_files:
            self.prefix('RUN', 'rm -Rf', filename)
        self.blank()
    if self._volumes is not None:
        self.prefix('VOLUME', json.dumps(self._volumes))
    if self._cmd_user:
        self.prefix('USER', self._cmd_user)
    if self._cmd_workdir:
        self.prefix('WORKDIR', self._cmd_workdir)
    if self._shell:
        self.prefix('SHELL', self._shell)
    if self._entrypoint is not None:
        self.prefix('ENTRYPOINT', format_command(self._entrypoint, self._command_shell))
    if self._command is not None:
        self.prefix('CMD', format_command(self._command, self._command_shell))
    if self._expose is not None:
        self.prefix('EXPOSE', *format_expose(self._expose))
    if self._labels:
        self.prefix('LABEL', *format_labels(self._labels))
    if self._stopsignal:
        self.prefix('STOPSIGNAL', self._stopsignal)
    if self._healthcheck:
        self.prefix('HEALTHCHECK', self._healthcheck)
    super(DockerFile, self).finalize()
def merge_dependency(self, item, resolve_parent, parents):
    """
    Merge dependencies of current configuration with further dependencies; in
    this instance, it means that in case of container configuration first
    parent dependencies are checked, and then immediate dependencies of the
    current configuration should be added to the list, but without duplicating
    any entries.

    :param item: Configuration item.
    :type item: (unicode | str, unicode | str, unicode | str, unicode | str)
    :param resolve_parent: Function to resolve parent dependencies.
    :type resolve_parent: function
    :type parents: collections.Iterable[(unicode | str, unicode | str, unicode | str, unicode | str)]
    :return: List of recursively resolved dependencies of this container.
    :rtype: list[(unicode | str, unicode | str, unicode | str, unicode | str)]
    :raise CircularDependency: If the current element depends on one found deeper in the hierarchy.
    """
    merged = []
    for parent_key in parents:
        if parent_key == item:
            # A configuration that lists itself as a parent.
            raise CircularDependency(item, True)
        if parent_key.config_type == ItemType.CONTAINER:
            parent_dependencies = resolve_parent(parent_key)
            if item in parent_dependencies:
                # The item appears deeper in its own dependency tree.
                raise CircularDependency(item)
            merge_list(merged, parent_dependencies)
    merge_list(merged, parents)
    return merged
def expand_node(loader, node, expand_method):
    """
    Expands paths on a YAML document node. If it is a sequence node (list)
    items on the first level are expanded. For a mapping node (dict), values
    are expanded.

    :param loader: YAML loader.
    :type loader: yaml.loader.SafeLoader
    :param node: Document node.
    :type node: ScalarNode, MappingNode, or SequenceNode
    :param expand_method: Callable to expand the path with.
    :type expand_method: callable
    :return: Expanded value.
    :rtype: unicode | str | list | dict
    """
    if isinstance(node, yaml.nodes.ScalarNode):
        return expand_method(loader.construct_scalar(node))
    if isinstance(node, yaml.nodes.MappingNode):
        mapping = loader.construct_mapping(node)
        for map_key, map_value in six.iteritems(mapping):
            mapping[map_key] = expand_method(map_value)
        return mapping
    if isinstance(node, yaml.nodes.SequenceNode):
        return [expand_method(element)
                for element in loader.construct_sequence(node)]
def load_map(stream, name=None, check_integrity=True, check_duplicates=True):
    """
    Loads a ContainerMap configuration from a YAML document stream.

    :param stream: YAML stream.
    :type stream: file
    :param name: Name of the ContainerMap. If not provided, will be attempted
      to read from a ``name`` attribute on the document root level.
    :type name: unicode | str
    :param check_integrity: Performs a brief integrity check; default is ``True``.
    :type check_integrity: bool
    :param check_duplicates: Check for duplicate attached volumes during
      integrity check.
    :type check_duplicates: bool
    :return: A ContainerMap object.
    :rtype: ContainerMap
    """
    map_dict = yaml.safe_load(stream)
    if not isinstance(map_dict, dict):
        raise ValueError("Valid map could not be decoded.")
    # Explicit name wins; otherwise pop it from the document root.
    map_name = name or map_dict.pop('name', None)
    if not map_name:
        raise ValueError("No map name provided, and none found in YAML stream.")
    return ContainerMap(map_name, map_dict, check_integrity=check_integrity,
                        check_duplicates=check_duplicates)
def load_clients(stream, configuration_class=ClientConfiguration):
    """
    Loads client configurations from a YAML document stream.

    :param stream: YAML stream.
    :type stream: file
    :param configuration_class: Class of the configuration object to create.
    :type configuration_class: class
    :return: A dictionary of client configuration objects.
    :rtype: dict[unicode | str, dockermap.map.config.client.ClientConfiguration]
    """
    client_dict = yaml.safe_load(stream)
    if not isinstance(client_dict, dict):
        raise ValueError("Valid configuration could not be decoded.")
    return {client_name: configuration_class(**client_config)
            for client_name, client_config in six.iteritems(client_dict)}
def load_map_file(filename, name=None, check_integrity=True):
    """
    Loads a ContainerMap configuration from a YAML file.

    :param filename: YAML file name.
    :type filename: unicode | str
    :param name: Name of the ContainerMap. If ``None`` will attempt to find a
      ``name`` element on the root level of the document; an empty string
      names the map according to the file, without extension.
    :type name: unicode | str
    :param check_integrity: Performs a brief integrity check; default is ``True``.
    :type check_integrity: bool
    :return: A ContainerMap object.
    :rtype: ContainerMap
    """
    if name == '':
        # Name the map after the file, with the extension stripped.
        map_name = os.path.basename(filename).rpartition(os.path.extsep)[0]
    else:
        map_name = name
    with open(filename, 'r') as f:
        return load_map(f, name=map_name, check_integrity=check_integrity)
def load_clients_file(filename, configuration_class=ClientConfiguration):
    """
    Loads client configurations from a YAML file.

    :param filename: YAML file name.
    :type filename: unicode | str
    :param configuration_class: Class of the configuration object to create.
    :type configuration_class: class
    :return: A dictionary of client configuration objects.
    :rtype: dict[unicode | str, dockermap.map.config.client.ClientConfiguration]
    """
    with open(filename, 'r') as config_file:
        return load_clients(config_file, configuration_class=configuration_class)
def get_policy(self):
    """
    Returns an instance of :attr:`~policy_class`.

    :return: An instance of the current policy class.
    :rtype: dockermap.map.policy.base.BasePolicy
    """
    policy = self._policy
    if not policy:
        # Lazily instantiate and cache the policy.
        policy = self.policy_class(self._maps, self._clients)
        self._policy = policy
    return policy
def get_state_generator(self, action_name, policy, kwargs):
    """
    Returns the state generator to be used for the given action.

    :param action_name: Action identifier name.
    :type action_name: unicode | str
    :param policy: An instance of the current policy class.
    :type policy: dockermap.map.policy.base.BasePolicy
    :param kwargs: Keyword arguments. Can be modified by the initialization of
      the state generator.
    :type kwargs: dict
    :return: State generator object.
    :rtype: dockermap.map.state.base.AbstractStateGenerator
    """
    # First element of the generator pair is the state generator class.
    generator_cls = self.generators[action_name][0]
    return generator_cls(policy, kwargs)
def get_action_generator(self, action_name, policy, kwargs):
    """
    Returns the action generator to be used for the given action.

    :param action_name: Action identifier name.
    :type action_name: unicode | str
    :param policy: An instance of the current policy class.
    :type policy: dockermap.map.policy.base.BasePolicy
    :param kwargs: Keyword arguments. Can be modified by the initialization of
      the action generator.
    :type kwargs: dict
    :return: Action generator object.
    :rtype: dockermap.map.action.base.AbstractActionGenerator
    """
    # Second element of the generator pair is the action generator class.
    generator_cls = self.generators[action_name][1]
    return generator_cls(policy, kwargs)
def get_states(self, action_name, config_name, instances=None, map_name=None, **kwargs):
    """
    Returns a generator of states in relation to the indicated action.

    :param action_name: Action name.
    :type action_name: unicode | str
    :param config_name: Name(s) of container configuration(s) or MapConfigId tuple(s).
    :type config_name: unicode | str | collections.Iterable[unicode | str] | dockermap.map.input.InputConfigId | collections.Iterable[dockermap.map.input.InputConfigId]
    :param instances: Optional instance names, where applicable but not included in ``config_name``.
    :type instances: unicode | str | collections.Iterable[unicode | str]
    :param map_name: Optional map name, where not included in ``config_name``.
    :param kwargs: Additional kwargs for state generation, action generation,
      runner, or the client action.
    :return: Resulting states of the configurations.
    :rtype: collections.Iterable[dockermap.map.state.ConfigState]
    """
    policy = self.get_policy()
    active_map = map_name or self._default_map
    _set_forced_update_ids(kwargs, policy.container_maps, active_map, instances)
    state_generator = self.get_state_generator(action_name, policy, kwargs)
    log.debug("Remaining kwargs passed to client actions: %s", kwargs)
    config_ids = get_map_config_ids(config_name, policy.container_maps,
                                    active_map, instances)
    log.debug("Generating states for configurations: %s", config_ids)
    return state_generator.get_states(config_ids)
def get_actions(self, action_name, config_name, instances=None, map_name=None, **kwargs):
    """
    Returns the entire set of actions performed for the indicated action name.

    :param action_name: Action name.
    :type action_name: unicode | str
    :param config_name: Name(s) of container configuration(s) or MapConfigId tuple(s).
    :type config_name: unicode | str | collections.Iterable[unicode | str] | dockermap.map.input.MapConfigId | collections.Iterable[dockermap.map.input.MapConfigId]
    :param instances: Optional instance names, where applicable but not included in ``config_name``.
    :type instances: unicode | str | collections.Iterable[unicode | str]
    :param map_name: Optional map name, where not included in ``config_name``.
    :param kwargs: Additional kwargs for state generation, action generation,
      runner, or the client action.
    :return: Resulting actions of the configurations.
    :rtype: collections.Iterable[list[dockermap.map.action.ItemAction]]
    """
    policy = self.get_policy()
    action_generator = self.get_action_generator(action_name, policy, kwargs)
    states = self.get_states(action_name, config_name, instances=instances,
                             map_name=map_name, **kwargs)
    for state in states:
        log.debug("Evaluating state: %s.", state)
        actions = action_generator.get_state_actions(state, **kwargs)
        if not actions:
            log.debug("No actions returned.")
            continue
        log.debug("Running actions: %s", actions)
        yield actions
def run_actions(self, action_name, config_name, instances=None, map_name=None, **kwargs):
    """
    Runs the entire set of actions performed for the indicated action name. On
    any client failure this raises a
    :class:`~dockermap.map.exceptions.ActionRunnerException`, where partial
    results can be reviewed in the property ``results``, or
    :class:`~dockermap.exceptions.MiscInvocationError` if no particular action
    was performed.

    :param action_name: Action name.
    :type action_name: unicode | str
    :param config_name: Name(s) of container configuration(s) or MapConfigId tuple(s).
    :type config_name: unicode | str | collections.Iterable[unicode | str] | dockermap.map.input.MapConfigId | collections.Iterable[dockermap.map.input.MapConfigId]
    :param instances: Optional instance names, where applicable but not included in ``config_name``.
    :type instances: unicode | str | collections.Iterable[unicode | str]
    :param map_name: Optional map name, where not included in ``config_name``.
    :param kwargs: Additional kwargs for state generation, action generation,
      runner, or the client action.
    :return: Client output of actions of the configurations.
    :rtype: list[dockermap.map.runner.ActionOutput]
    """
    policy = self.get_policy()
    results = []
    runner = self.get_runner(policy, kwargs)
    for action_list in self.get_actions(action_name, config_name, instances, map_name, **kwargs):
        try:
            for res in runner.run_actions(action_list):
                results.append(res)
        except ActionException as ae:
            raise ActionRunnerException.from_action_exception(ae, results)
        except Exception:
            # Narrowed from a bare ``except:`` so that SystemExit and
            # KeyboardInterrupt propagate instead of being wrapped as
            # partial results.
            exc_info = sys.exc_info()
            raise PartialResultsError(exc_info, results)
    return results
def create(self, container, instances=None, map_name=None, **kwargs):
    """
    Creates container instances for a container configuration.

    :param container: Container name.
    :type container: unicode | str
    :param instances: Instance name to create. When omitted, every instance defined in the
      configuration is created (or a single default instance).
    :type instances: tuple | list
    :param map_name: Container map name. Optional - if not provided the default map is used.
    :type map_name: unicode | str
    :param kwargs: Additional kwargs. When several actions result from this call, they are
      only applied to the main container creation.
    :return: Return values of created containers.
    :rtype: list[dockermap.map.runner.ActionOutput]
    """
    return self.run_actions('create', container, instances, map_name, **kwargs)
def start(self, container, instances=None, map_name=None, **kwargs):
    """
    Starts instances for a container configuration.

    :param container: Container name.
    :type container: unicode | str
    :param instances: Instance names to start. When omitted, every instance defined in the
      configuration is started (or a single default instance).
    :type instances: collections.Iterable[unicode | str | NoneType]
    :param map_name: Container map name. Optional - if not provided the default map is used.
    :type map_name: unicode | str
    :param kwargs: Additional kwargs. When several actions result from this call, they are
      only applied to the main container start.
    :return: Return values of started containers.
    :rtype: list[dockermap.map.runner.ActionOutput]
    """
    return self.run_actions('start', container, instances, map_name, **kwargs)
def restart(self, container, instances=None, map_name=None, **kwargs):
    """
    Restarts instances for a container configuration.

    :param container: Container name.
    :type container: unicode | str
    :param instances: Instance names to restart. When omitted, every instance defined in the
      configuration is restarted (or a single default instance).
    :type instances: collections.Iterable[unicode | str | NoneType]
    :param map_name: Container map name. Optional - if not provided the default map is used.
    :type map_name: unicode | str
    :param kwargs: Additional kwargs. When several actions result from this call, they are
      only applied to the main container restart.
    :return: Return values of restarted containers.
    :rtype: list[dockermap.map.runner.ActionOutput]
    """
    return self.run_actions('restart', container, instances, map_name, **kwargs)
def stop(self, container, instances=None, map_name=None, **kwargs):
    """
    Stops instances for a container configuration.

    :param container: Container name.
    :type container: unicode | str
    :param instances: Instance names to stop. When omitted, every instance defined in the
      configuration is stopped (or a single default instance).
    :type instances: collections.Iterable[unicode | str | NoneType]
    :param map_name: Container map name. Optional - if not provided the default map is used.
    :type map_name: unicode | str
    :param raise_on_error: Errors on stop and removal may result from Docker volume problems, that do not
      affect further actions. Such errors are always logged, but do not raise an exception unless this is
      set to ``True``. Please note that 404 errors (on non-existing containers) are always ignored on stop
      and removal.
    :type raise_on_error: bool
    :param kwargs: Additional kwargs. When several actions result from this call, they are
      only applied to the main container stop.
    :return: Return values of stopped containers.
    :rtype: list[dockermap.map.runner.ActionOutput]
    """
    return self.run_actions('stop', container, instances, map_name, **kwargs)
def remove(self, container, instances=None, map_name=None, **kwargs):
    """
    Remove instances from a container configuration.

    :param container: Container name.
    :type container: unicode | str
    :param instances: Instance names to remove. When omitted, every instance defined in the
      configuration is removed (or a single default instance).
    :type instances: collections.Iterable[unicode | str | NoneType]
    :param map_name: Container map name. Optional - if not provided the default map is used.
    :type map_name: unicode | str
    :param kwargs: Additional kwargs. When several actions result from this call, they are
      only applied to the main container removal.
    :return: Return values of removed containers.
    :rtype: list[dockermap.map.runner.ActionOutput]
    """
    return self.run_actions('remove', container, instances, map_name, **kwargs)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.