code
stringlengths
52
7.75k
docs
stringlengths
1
5.85k
def terminal_attribute_iterator(self, mapped_class=None, key=None):
    """Iterate over all terminal mapped attributes for the given mapped
    class and attribute key.

    See :method:`get_attribute_map` for details.
    """
    yield from (attr
                for attr in self._attribute_iterator(mapped_class, key)
                if attr.kind == RESOURCE_ATTRIBUTE_KINDS.TERMINAL)
def create_data_element(self, mapped_class=None):
    """Return a new data element for the given mapped class.

    :returns: object implementing :class:`IResourceDataElement`.
    """
    # Delegate to the class-specific mapping when a different mapped
    # class is requested; otherwise use our own data element class.
    if mapped_class is not None and mapped_class != self.__mapped_cls:
        other_mapping = self.__mp_reg.find_or_create_mapping(mapped_class)
        return other_mapping.create_data_element()
    return self.__de_cls.create()
def create_linked_data_element(self, url, kind,
                               id=None, # pylint: disable=W0622
                               relation=None, title=None):
    """Return a new linked data element for the given url and kind.

    :param str url: URL to assign to the linked data element.
    :param str kind: kind of the resource that is linked. One of the
        constants defined by :class:`everest.constants.RESOURCE_KINDS`.
    :returns: object implementing :class:`ILinkedDataElement`.
    """
    link_mapping = self.__mp_reg.find_or_create_mapping(Link)
    return link_mapping.data_element_class.create(url, kind, id=id,
                                                  relation=relation,
                                                  title=title)
def create_data_element_from_resource(self, resource):
    """Return a new data element for the given resource object.

    :returns: object implementing :class:`IResourceDataElement`.
    """
    resource_mapping = self.__mp_reg.find_or_create_mapping(type(resource))
    return resource_mapping.data_element_class.create_from_resource(resource)
def create_linked_data_element_from_resource(self, resource):
    """Return a new linked data element for the given resource object.

    :returns: object implementing :class:`ILinkedDataElement`.
    """
    link_mapping = self.__mp_reg.find_or_create_mapping(Link)
    return link_mapping.data_element_class.create_from_resource(resource)
def map_to_resource(self, data_element, resource=None):
    """Map the given data element to a new resource or update the given
    resource.

    :raises ValueError: If `data_element` does not provide
        :class:`everest.representers.interfaces.IDataElement`.
    """
    if not IDataElement.providedBy(data_element): # pylint:disable=E1101
        raise ValueError('Expected data element, got %s.' % data_element)
    if resource is None:
        # Build a staging collection and let the aggregate resolve the
        # data element into entities.
        coll = create_staging_collection(data_element.mapping.mapped_class)
        agg = coll.get_aggregate()
        agg.add(data_element)
        if IMemberDataElement.providedBy(data_element): # pylint: disable=E1101
            ent = next(iter(agg))
            resource = \
                data_element.mapping.mapped_class.create_from_entity(ent)
        else:
            resource = coll
    else:
        resource.update(data_element)
    return resource
def map_to_data_element(self, resource):
    """Map the given resource to a data element tree."""
    traverser = ResourceTreeTraverser(resource, self.as_pruning())
    builder = DataElementBuilderResourceTreeVisitor(self)
    traverser.run(builder)
    return builder.data_element
def as_pruning(self):
    """Return a clone of this mapping with the `is_pruning` flag set to
    *True*.
    """
    return PruningMapping(self.__mp_reg,
                          self.__mapped_cls,
                          self.__de_cls,
                          self.__configurations[-1])
def push_configuration(self, configuration):
    """Push the given configuration object on the stack of configurations
    managed by this mapping and make it the active configuration.
    """
    # The attribute cache depends on the active configuration, so it
    # must be invalidated whenever the stack changes.
    self.__mapped_attr_cache.clear()
    self.__configurations.append(configuration)
def pop_configuration(self):
    """Pop the currently active configuration from the stack of
    configurations managed by this mapping.

    :raises IndexError: If there is only one configuration in the stack.
    """
    if len(self.__configurations) == 1:
        raise IndexError('Can not pop the last configuration from the '
                         'stack of configurations.')
    self.__configurations.pop()
    # Cached attributes were computed against the popped configuration.
    self.__mapped_attr_cache.clear()
def with_updated_configuration(self, options=None, attribute_options=None):
    """Return a context in which this mapping is updated with the given
    options and attribute options.
    """
    new_cfg = self.__configurations[-1].copy()
    if options is not None:
        for o_name, o_value in iteritems_(options):
            new_cfg.set_option(o_name, o_value)
    if attribute_options is not None:
        for attr_name, ao_opts in iteritems_(attribute_options):
            for ao_name, ao_value in iteritems_(ao_opts):
                new_cfg.set_attribute_option(attr_name, ao_name, ao_value)
    return MappingConfigurationContext(self, new_cfg)
def _attribute_iterator(self, mapped_class, key):
    """Iterate over the attributes in this mapping for the given mapped
    class and attribute key.

    If this is a pruning mapping, attributes that are ignored because of
    a custom configuration or because of the default ignore rules are
    skipped.
    """
    attr_map = self.__get_attribute_map(mapped_class, key, 0)
    for attr in itervalues_(attr_map):
        # should_ignore() is only consulted on pruning mappings
        # (short-circuit preserves the original call pattern).
        if not (self.is_pruning and attr.should_ignore(key)):
            yield attr
def create_mapping(self, mapped_class, configuration=None):
    """Create a new mapping for the given mapped class and representer
    configuration.

    :param configuration: configuration for the new data element class.
    :type configuration: :class:`RepresenterConfiguration`
    :returns: newly created instance of :class:`Mapping`
    """
    cfg = self.__configuration.copy()
    if configuration is not None:
        cfg.update(configuration)
    # Pick the data element base class matching the resource interface
    # (instantiating with __new__ avoids running the class' __init__).
    provided_ifcs = provided_by(object.__new__(mapped_class))
    if IMemberResource in provided_ifcs:
        base_data_element_class = self.member_data_element_base_class
    elif ICollectionResource in provided_ifcs:
        base_data_element_class = self.collection_data_element_base_class
    elif IResourceLink in provided_ifcs:
        base_data_element_class = self.linked_data_element_base_class
    else:
        raise ValueError('Mapped class for data element class does not '
                         'implement one of the required interfaces.')
    name = "%s%s" % (mapped_class.__name__,
                     base_data_element_class.__name__)
    de_cls = type(name, (base_data_element_class,), {})
    mp = self.mapping_class(self, mapped_class, de_cls, cfg)
    # Set the data element class' mapping.
    # FIXME: This looks like a hack.
    de_cls.mapping = mp
    return mp
def find_mapping(self, mapped_class):
    """Return the mapping registered for the given mapped class or any of
    its base classes. Returns `None` if no mapping can be found.

    :param mapped_class: mapped type
    :type mapped_class: type
    :returns: instance of :class:`Mapping` or `None`
    """
    if not self.__is_initialized:
        self.__is_initialized = True
        self._initialize()
    # Walk the MRO and return the first registered mapping, if any.
    for base_cls in mapped_class.__mro__:
        if base_cls in self.__mappings:
            return self.__mappings[base_cls]
    return None
def find_or_create_mapping(self, mapped_class):
    """First call :meth:`find_mapping` to check if a mapping for the given
    mapped class or any of its base classes has been created. If not, a
    new one is created with a default configuration, registered
    automatically and returned.
    """
    found = self.find_mapping(mapped_class)
    if found is not None:
        return found
    created = self.create_mapping(mapped_class)
    self.set_mapping(created)
    return created
def json_serial(obj):
    """JSON serializer for objects not serializable by default json code."""
    if isinstance(obj, LegipyModel):
        return obj.to_json()
    if isinstance(obj, (datetime.date, datetime.datetime)):
        return obj.isoformat()
    raise TypeError("Type {0} not serializable".format(repr(type(obj))))
def eachMethod(decorator, methodFilter=lambda fName: True):
    """Class decorator that wraps every single method in its own method
    decorator.

    methodFilter: a function which accepts a function name and should
    return True if the method is one which we want to decorate, False if
    we want to leave this method alone. methodFilter can also be simply
    a string prefix. If it is a string, it is assumed to be the prefix
    we're looking for.

    NOTE(review): uses ``basestring`` and ``getargspec`` -- Python 2
    era APIs; confirm the project still targets that runtime.
    """
    if isinstance(methodFilter, basestring):
        # Is it a string? If it is, change it into a function that takes a string.
        prefix = methodFilter
        methodFilter = lambda fName: fName.startswith(prefix)
    ismethod = lambda fn: inspect.ismethod(fn) or inspect.isfunction(fn)

    def innerDeco(cls):
        assert inspect.isclass(cls), "eachMethod is designed to be used only on classes"
        for fName, fn in inspect.getmembers(cls):
            if methodFilter(fName):
                if ismethod(fn):
                    # We attempt to avoid decorating staticmethods by looking for an arg named cls
                    # or self; this is a kludge, but there's no other way to tell, and
                    # staticmethods do not work correctly with eachMethod
                    if getargspec(fn).args[0] not in ['cls', 'self']:
                        continue
                    setattr(cls, fName, decorator(fn))
        return cls
    return innerDeco
def _sibpath(path, sibling): return os.path.join(os.path.dirname(os.path.abspath(path)), sibling)
Return the path to a sibling of a file in the filesystem. This is useful in conjunction with the special C{__file__} attribute that Python provides for modules, so modules can load associated resource files. (Stolen from twisted.python.util)
def parseDate(dateString, strict=True):
    """Return a datetime object, by parsing a string date/time.

    With strict=False, dateString may be None or '', otherwise it must
    be a parseable string.
    """
    if (not strict) and (not dateString):
        return None
    if not isinstance(dateString, basestring):
        raise TypeError('%r is not a string' % dateString)
    return parser.parse(dateString)
def cache(cls, func):
    """Global cache decorator.

    :param func: the function to be decorated
    :return: the decorator
    """
    @functools.wraps(func)
    def func_wrapper(*args, **kwargs):
        key = cls.get_key(func)
        val_cache = cls.get_cache(key)
        lock = cls.get_cache_lock(key)
        return cls._get_value_from_cache(
            func, val_cache, lock, *args, **kwargs)
    return func_wrapper
def instance_cache(cls, func):
    """Save the cache to `self`.

    This decorator takes it for granted that the decorated function is a
    method; the first argument of the function is `self`.

    :param func: function to decorate
    :return: the decorator
    """
    @functools.wraps(func)
    def func_wrapper(*args, **kwargs):
        # Guard clause: a bound call always supplies `self` first.
        if not args:
            raise ValueError('`self` is not available.')
        the_self = args[0]
        key = cls.get_key(func)
        val_cache = cls.get_self_cache(the_self, key)
        lock = cls.get_self_cache_lock(the_self, key)
        return cls._get_value_from_cache(
            func, val_cache, lock, *args, **kwargs)
    return func_wrapper
def clear_instance_cache(cls, func):
    """Clear the instance cache.

    Decorates a method of a class; the first parameter is supposed to be
    `self`. It clears all items cached by the `instance_cache` decorator.

    :param func: function to decorate
    """
    @functools.wraps(func)
    def func_wrapper(*args, **kwargs):
        # Guard clause: a bound call always supplies `self` first.
        if not args:
            raise ValueError('`self` is not available.')
        cls.clear_self_cache(args[0])
        return func(*args, **kwargs)
    return func_wrapper
def persisted(cls, seconds=0, minutes=0, hours=0, days=0, weeks=0):
    """Cache the return of the function for the given time. Default to 1 day.

    :param seconds: as name
    :param minutes: as name
    :param hours: as name
    :param days: as name
    :param weeks: as name
    :return: return of the function decorated
    """
    # Fold everything down into a single seconds figure.
    days += weeks * 7
    hours += days * 24
    minutes += hours * 60
    seconds += minutes * 60
    if seconds == 0:
        # default to 1 day
        seconds = 24 * 60 * 60

    def get_persisted_file(hash_number):
        folder = cls.get_persist_folder()
        if not os.path.exists(folder):
            os.makedirs(folder)
        return os.path.join(folder, '{}.pickle'.format(hash_number))

    def is_expired(filename):
        if os.path.exists(filename):
            file_age = cls.get_file_age(filename)
            if file_age > seconds:
                log.debug('persisted cache expired: {}'.format(filename))
                ret = True
            else:
                ret = False
        else:
            ret = True
        return ret

    def decorator(func):
        def func_wrapper(*args, **kwargs):
            def _key_gen():
                # Key is derived from the function identity and the call
                # arguments, hashed to keep the filename filesystem-safe.
                string = '{}-{}-{}-{}'.format(
                    func.__module__,
                    func.__name__,
                    args,
                    kwargs.items()
                )
                return hashlib.sha256(string.encode('utf-8')).hexdigest()

            key = _key_gen()
            persisted_file = get_persisted_file(key)
            if is_expired(persisted_file):
                ret = func(*args, **kwargs)
                with open(persisted_file, 'wb') as f:
                    pickle.dump(ret, f)
            else:
                with open(persisted_file, 'rb') as f:
                    ret = pickle.load(f)
            return ret
        return func_wrapper
    return decorator
def getEvents(self, repo_user, repo_name, until_id=None):
    """Get all repository events, following paging, until the end or
    until UNTIL_ID is seen.

    Returns a Deferred.
    """
    # NOTE: this is a Twisted inlineCallbacks-style generator; each
    # ``yield`` waits on the Deferred returned by makeRequest.
    done = False
    page = 0
    events = []
    while not done:
        new_events = yield self.api.makeRequest(
            ['repos', repo_user, repo_name, 'events'],
            page)
        # terminate if we find a matching ID
        if new_events:
            for event in new_events:
                if event['id'] == until_id:
                    done = True
                    break
                events.append(event)
        else:
            done = True
        page += 1
    defer.returnValue(events)
def getHook(self, repo_user, repo_name, hook_id):
    """GET /repos/:owner/:repo/hooks/:id

    Returns the Hook.
    """
    path = ['repos', repo_user, repo_name, 'hooks', str(hook_id)]
    return self.api.makeRequest(path, method='GET')
def editHook(self, repo_user, repo_name, hook_id, name, config,
             events=None, add_events=None, remove_events=None,
             active=None):
    """PATCH /repos/:owner/:repo/hooks/:id

    :param hook_id: Id of the hook.
    :param name: The name of the service that is being called.
    :param config: A Hash containing key/value pairs to provide settings
        for this hook.
    """
    post = {'name': name, 'config': config}
    # Optional fields are only sent when explicitly provided.
    if events is not None:
        post['events'] = events
    if add_events is not None:
        post['add_events'] = add_events
    if remove_events is not None:
        post['remove_events'] = remove_events
    if active is not None:
        post['active'] = active
    return self.api.makeRequest(
        ['repos', repo_user, repo_name, 'hooks', str(hook_id)],
        method='PATCH',
        post=post,
    )
def getStatuses(self, repo_user, repo_name, sha):
    """GET /repos/:owner/:repo/statuses/:sha

    :param sha: Full sha to list the statuses from.
    :return: A deferred with the result from GitHub.
    """
    path = ['repos', repo_user, repo_name, 'statuses', sha]
    return self.api.makeRequest(path, method='GET')
def createStatus(self, repo_user, repo_name, sha, state,
                 target_url=None, description=None, context=None):
    """POST /repos/:owner/:repo/statuses/:sha

    :param sha: Full sha to create the status for.
    :param state: one of the following 'pending', 'success', 'error'
        or 'failure'.
    :param target_url: Target url to associate with this status.
    :param description: Short description of the status.
    :return: A deferred with the result from GitHub.
    """
    payload = {'state': state}
    # Optional fields are only sent when explicitly provided.
    if description is not None:
        payload['description'] = description
    if target_url is not None:
        payload['target_url'] = target_url
    if context is not None:
        payload['context'] = context
    return self.api.makeRequest(
        ['repos', repo_user, repo_name, 'statuses', sha],
        method='POST',
        post=payload)
def edit(self, repo_user, repo_name, pull_number, title=None, body=None,
         state=None):
    """PATCH /repos/:owner/:repo/pulls/:number

    :param pull_number: The pull request's number
    :param title: The new title for the pull request
    :param body: The new top-level body for the pull request
    :param state: The new state for the pull request ('open' or 'closed')
    """
    if not any((title, body, state)):
        raise ValueError("must provide at least one of:"
                         " title, body, state")
    post = {}
    if title is not None:
        post['title'] = title
    if body is not None:
        post['body'] = body
    if state is not None:
        if state not in ('open', 'closed'):
            raise ValueError("state must be either 'open' or 'closed'")
        post['state'] = state
    return self.api.makeRequest(
        ['repos', repo_user, repo_name, 'pulls', pull_number],
        method='PATCH',
        post=post)
def create(self, repo_user, repo_name, issue_number, body):
    """POST /repos/:owner/:repo/issues/:number/comments

    :param issue_number: The issue's (or pull request's) number
    :param body: The body of this comment
    """
    path = ['repos', repo_user, repo_name,
            'issues', issue_number, 'comments']
    return self.api.makeRequest(path, method='POST', post=dict(body=body))
def getPullRequestComments(self, repo_user, repo_name, pull_number):
    """GET /repos/:owner/:repo/pulls/:number/comments

    :param pull_number: The pull request's number.
    """
    path = ['repos', repo_user, repo_name,
            'pulls', str(pull_number), 'comments']
    return self.api.makeRequestAllPages(path)
def getComment(self, repo_user, repo_name, comment_id):
    """GET /repos/:owner/:repo/pulls/comments/:id

    :param comment_id: The review comment's ID.
    """
    path = ['repos', repo_user, repo_name,
            'pulls', 'comments', str(comment_id)]
    return self.api.makeRequest(path)
def createComment(self, repo_user, repo_name, pull_number, body,
                  commit_id, path, position):
    """POST /repos/:owner/:repo/pulls/:number/comments

    :param pull_number: The pull request's ID.
    :param body: The text of the comment.
    :param commit_id: The SHA of the commit to comment on.
    :param path: The relative path of the file to comment on.
    :param position: The line index in the diff to comment on.
    """
    # NOTE(review): passes ``data=`` while the sibling methods above pass
    # ``post=`` -- confirm makeRequest accepts both keyword names.
    return self.api.makeRequest(
        ["repos", repo_user, repo_name,
         "pulls", str(pull_number), "comments"],
        method="POST",
        data=dict(body=body, commit_id=commit_id, path=path,
                  position=position))
def replyToComment(self, repo_user, repo_name, pull_number, body,
                   in_reply_to):
    """POST /repos/:owner/:repo/pulls/:number/comments

    Like create, but reply to an existing comment.

    :param body: The text of the comment.
    :param in_reply_to: The comment ID to reply to.
    """
    return self.api.makeRequest(
        ["repos", repo_user, repo_name,
         "pulls", str(pull_number), "comments"],
        method="POST",
        data=dict(body=body, in_reply_to=in_reply_to))
def editComment(self, repo_user, repo_name, comment_id, body):
    """PATCH /repos/:owner/:repo/pulls/comments/:id

    :param comment_id: The ID of the comment to edit
    :param body: The new body of the comment.

    NOTE(review): the HTTP method used is "POST" although the endpoint
    is documented as PATCH -- confirm against the API wrapper.
    """
    return self.api.makeRequest(
        ["repos", repo_user, repo_name,
         "pulls", "comments", str(comment_id)],
        method="POST",
        data=dict(body=body))
def deleteComment(self, repo_user, repo_name, comment_id):
    """DELETE /repos/:owner/:repo/pulls/comments/:id

    :param comment_id: The ID of the comment to delete.
    """
    return self.api.makeRequest(
        ["repos", repo_user, repo_name,
         "pulls", "comments", str(comment_id)],
        method="DELETE")
def from_project_path(cls, path):
    """Utility for finding a virtualenv location based on a project path."""
    path = vistir.compat.Path(path)
    if path.name == 'Pipfile':
        pipfile_path = path
        path = path.parent
    else:
        pipfile_path = path / 'Pipfile'
    pipfile_location = cls.normalize_path(pipfile_path)
    venv_path = path / '.venv'
    if venv_path.exists():
        if not venv_path.is_dir():
            # A .venv *file* holds the path to the real environment.
            possible_path = vistir.compat.Path(venv_path.read_text().strip())
            if possible_path.exists():
                return cls(possible_path.as_posix())
        else:
            if venv_path.joinpath('lib').exists():
                return cls(venv_path.as_posix())
    # No local .venv: derive a workon-home name from the sanitized
    # project name plus a short hash of the Pipfile location.
    sanitized = re.sub(r'[ $`!*@"\\\r\n\t]', "_", path.name)[0:42]
    hash_ = hashlib.sha256(pipfile_location.encode()).digest()[:6]
    encoded_hash = base64.urlsafe_b64encode(hash_).decode()
    hash_fragment = encoded_hash[:8]
    venv_name = "{0}-{1}".format(sanitized, hash_fragment)
    return cls(cls.get_workon_home().joinpath(venv_name).as_posix())
def get_sys_path(cls, python_path):
    """Get the :data:`sys.path` data for a given python executable.

    :param str python_path: Path to a specific python executable.
    :return: The system path information for that python runtime.
    :rtype: list
    """
    command = [python_path, "-c",
               "import json, sys; print(json.dumps(sys.path))"]
    result = vistir.misc.run(command, return_object=True, block=True,
                             nospin=True)
    assert result.returncode == 0, "failed loading virtualenv path"
    return json.loads(result.out.strip())
def get_setup_install_args(self, pkgname, setup_py, develop=False):
    """Get setup.py install args for installing the supplied package in
    the virtualenv.

    :param str pkgname: The name of the package to install
    :param str setup_py: The path to the setup file of the package
    :param bool develop: Whether the package is in development mode
    :return: The installation arguments to pass to the interpreter when
        installing
    :rtype: list
    """
    # NOTE(review): the original computed a per-package headers path
    # (base headers / python version / pkgname) but never used it --
    # --install-headers below points at the base headers dir.  The dead
    # computation is removed here; if the per-package path was intended,
    # --install-headers should be switched to it instead.
    install_arg = "install" if not develop else "develop"
    return [
        self.python, "-u", "-c", SETUPTOOLS_SHIM % setup_py,
        install_arg,
        "--single-version-externally-managed",
        "--install-headers={0}".format(self.base_paths["headers"]),
        "--install-purelib={0}".format(self.base_paths["purelib"]),
        "--install-platlib={0}".format(self.base_paths["platlib"]),
        "--install-scripts={0}".format(self.base_paths["scripts"]),
        "--install-data={0}".format(self.base_paths["data"]),
    ]
def setuptools_install(self, chdir_to, pkg_name, setup_py_path=None,
                       editable=False):
    """Install an sdist or an editable package into the virtualenv.

    :param str chdir_to: The location to change to
    :param str pkg_name: The name of the package to install
    :param str setup_py_path: The path to the setup.py, if applicable,
        defaults to None
    :param bool editable: Whether the package is editable, defaults to
        False
    """
    install_options = ["--prefix={0}".format(self.prefix.as_posix())]
    setup_args = self.get_setup_install_args(pkg_name, setup_py_path,
                                             develop=editable)
    with vistir.contextmanagers.cd(chdir_to):
        result = self.run(setup_args + install_options, cwd=chdir_to)
    return result.returncode
def install(self, req, editable=False, sources=None):
    """Install a package into the virtualenv.

    :param req: A requirement to install
    :type req: :class:`requirementslib.models.requirement.Requirement`
    :param bool editable: Whether the requirement is editable, defaults
        to False
    :param list sources: A list of pip sources to consult, defaults to
        an empty list
    :return: A return code, 0 if successful
    :rtype: int
    """
    # Mutable default argument ([]) replaced with a None sentinel; the
    # behavior for callers is unchanged.
    if sources is None:
        sources = []
    try:
        packagebuilder = self.safe_import("packagebuilder")
    except ImportError:
        packagebuilder = None
    with self.activated(include_extras=False):
        if not packagebuilder:
            return 2
        ireq = req.as_ireq()
        sources = self.filter_sources(req, sources)
        cache_dir = os.environ.get('PASSA_CACHE_DIR',
            os.environ.get(
                'PIPENV_CACHE_DIR',
                vistir.path.create_tracked_tempdir(prefix="passabuild")
            )
        )
        built = packagebuilder.build.build(ireq, sources, cache_dir)
        if isinstance(built, distlib.wheel.Wheel):
            maker = distlib.scripts.ScriptMaker(None, None)
            built.install(self.paths, maker)
        else:
            # Fall back to a setup.py based install for sdists.
            path = vistir.compat.Path(built.path)
            cd_path = path.parent
            setup_py = cd_path.joinpath("setup.py")
            return self.setuptools_install(
                cd_path.as_posix(), req.name, setup_py.as_posix(),
                editable=req.editable
            )
        return 0
def run(self, cmd, cwd=os.curdir):
    """Run a command with :class:`~subprocess.Popen` in the context of
    the virtualenv.

    :param cmd: A command to run in the virtual environment
    :type cmd: str or list
    :param str cwd: The working directory in which to execute the
        command, defaults to :data:`os.curdir`
    :return: A finished command object
    :rtype: :class:`~subprocess.Popen`
    """
    with self.activated():
        script = vistir.cmdparse.Script.parse(cmd)
        return vistir.misc.run(script._parts, return_object=True,
                               nospin=True, cwd=cwd)
def get_monkeypatched_pathset(self):
    """Return a monkeypatched `UninstallPathset` for uninstalling
    packages from the virtualenv.

    :return: A patched `UninstallPathset` which enables uninstallation
        of venv packages
    :rtype: :class:`pip._internal.req.req_uninstall.UninstallPathset`
    """
    from pip_shims.shims import InstallRequirement
    # Determine the path to the uninstall module name based on the
    # install module name.
    uninstall_path = InstallRequirement.__module__.replace(
        "req_install", "req_uninstall"
    )
    req_uninstall = self.safe_import(uninstall_path)
    self.recursive_monkey_patch.monkey_patch(
        PatchedUninstaller, req_uninstall.UninstallPathSet
    )
    return req_uninstall.UninstallPathSet
def uninstall(self, pkgname, *args, **kwargs):
    """A context manager which allows uninstallation of packages from the
    virtualenv.

    :param str pkgname: The name of a package to uninstall

    >>> venv = VirtualEnv("/path/to/venv/root")
    >>> with venv.uninstall("pytz", auto_confirm=True, verbose=False) as uninstaller:
            cleaned = uninstaller.paths
    >>> if cleaned:
            print("uninstalled packages: %s" % cleaned)
    """
    auto_confirm = kwargs.pop("auto_confirm", True)
    verbose = kwargs.pop("verbose", False)
    with self.activated():
        pathset_base = self.get_monkeypatched_pathset()
        dist = next(
            iter(filter(lambda d: d.project_name == pkgname,
                        self.get_working_set())),
            None
        )
        pathset = pathset_base.from_dist(dist)
        if pathset is not None:
            pathset.remove(auto_confirm=auto_confirm, verbose=verbose)
        try:
            yield pathset
        except Exception:
            # NOTE(review): the exception is swallowed after rollback
            # (not re-raised) -- presumably intentional best-effort
            # behavior; confirm with the callers.
            if pathset is not None:
                pathset.rollback()
        else:
            if pathset is not None:
                pathset.commit()
        if pathset is None:
            return
def remove_examples_all():
    """Remove arduino/examples/all directory.

    :rtype: None
    """
    target = examples_all_dir()
    if not target.exists():
        log.debug('nothing to remove: %s', target)
        return
    log.debug('remove %s', target)
    target.rmtree()
def getRootNode(nodes):
    """Return the node with the most children.

    Returns ``None`` for an empty ``nodes`` sequence, or when every node
    has zero children (matching the original "> max" comparison).
    """
    # Reconstructed from a corrupted duplicated row ("return roof ...");
    # local renamed from ``max`` to avoid shadowing the builtin.
    best = None
    most_children = 0
    for node in nodes:
        if len(node.children) > most_children:
            most_children = len(node.children)
            best = node
    return best
def getNextNode(nodes, usednodes, parent):
    """Get next node in a breadth-first traversal of nodes that have not
    been used yet.

    NOTE(review): the membership tests read ``in usednodes`` even though
    the docstring says "not been used yet" -- presumably ``usednodes``
    holds nodes already discovered/queued; confirm against the caller.
    Relies on a module-level ``edges`` sequence, and ``x`` is unbound if
    no edge touching ``parent`` matches -- also worth confirming.
    """
    # Reconstructed from a corrupted duplicated row; the ``usednoes``
    # typo (a guaranteed NameError) is fixed to ``usednodes``.
    for e in edges:
        if e.source == parent:
            if e.target in usednodes:
                x = e.target
                break
        elif e.target == parent:
            if e.source in usednodes:
                x = e.source
                break
    return x
def getCircles(rawnodes, rawedges):
    """Build Circle objects with their child arrays populated.

    Example input:
        rawnodes = [1,2,3,4,5,6]
        rawedges = [(1,2),(1,3),(1,4),(2,4),(1,5),(5,6)]

    Returns an array of Circle objects with attribute child arrays
    populated.
    """
    # Reconstructed from a corrupted duplicated row ("return circlef ...").
    circles = []
    for x in rawnodes:
        circle = Circle(str(x))
        # Every edge whose source is this node contributes a child.
        for (p, q) in rawedges:
            if p == x:
                circle.addChild(q)
        circles.append(circle)
    return circles
def calcPosition(self, parent_circle):
    """Position the circle tangent to the parent circle with the line
    connecting the centers of the two circles meeting the x axis at
    angle theta.

    :raises AttributeError: if ``r`` or ``theta`` has not been set yet.
    """
    # Reconstructed from a corrupted duplicated row.  The original
    # guards read ``r not in self`` / ``theta not in self``, which would
    # raise NameError; hasattr() is the intended membership check.
    if not hasattr(self, 'r'):
        raise AttributeError("radius must be calculated before position.")
    if not hasattr(self, 'theta'):
        raise AttributeError("theta must be set before position can be calculated.")
    # ``t_radians`` was undefined in the original; assumes theta is
    # stored in degrees -- TODO confirm against the rest of the class.
    t_radians = math.radians(self.theta)
    x_offset = math.cos(t_radians) * (parent_circle.r + self.r)
    y_offset = math.sin(t_radians) * (parent_circle.r + self.r)
    self.x = parent_circle.x + x_offset
    self.y = parent_circle.y + y_offset
def upload_progress(request):
    """Used by Ajax calls.

    Return the upload progress and total length values.
    """
    # ``progress_id`` was unbound (NameError) when the header was absent
    # from both GET and META; initialize it so the function degrades
    # gracefully instead.
    progress_id = None
    if 'X-Progress-ID' in request.GET:
        progress_id = request.GET['X-Progress-ID']
    elif 'X-Progress-ID' in request.META:
        progress_id = request.META['X-Progress-ID']
    if progress_id:
        cache_key = "%s_%s" % (request.META['REMOTE_ADDR'], progress_id)
        data = cache.get(cache_key)
        return HttpResponse(simplejson.dumps(data))
    # NOTE(review): falls through to an implicit None when no progress
    # id is supplied -- confirm the caller expects that.
def pre_save(self, model_instance, add):
    """Update username; set from the OS on non-browser saves.

    Created on ADD only.
    """
    value = super(UserField, self).pre_save(model_instance, add)
    if not value and not add:
        # fall back to OS user if not accessing through browser
        # better than nothing ...
        value = self.get_os_username()
        setattr(model_instance, self.attname, value)
        return value
    return value
def sys_toolbox_dir():
    """Return this site-package's esri toolbox directory."""
    package_dir = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(package_dir, 'esri', 'toolboxes')
def appdata_roaming_dir():
    """Return the roaming AppData directory for the installed ArcGIS
    Desktop.
    """
    install = arcpy.GetInstallInfo('desktop')
    app_data = arcpy.GetSystemEnvironment("APPDATA")
    # e.g. "Desktop10" -- product name plus major version.
    product_dir = ''.join((install['ProductName'], major_version()))
    return os.path.join(app_data, 'ESRI', product_dir)
def route(self, uri, methods=frozenset({'GET'}), host=None,
          strict_slashes=False, stream=False, websocket=False):
    """Decorate a function to be registered as a route

    :param uri: path of the URL
    :param methods: list or tuple of methods allowed
    :param host:
    :param strict_slashes:
    :param stream:
    :return: decorated function
    """
    # Reconstructed from a corrupted duplicated row ("return responsf ...").
    # Fix case where the user did not prefix the URL with a /
    # and will probably get confused as to why it's not working
    if not uri.startswith('/'):
        uri = '/' + uri

    def response(handler):
        if websocket:
            handler.is_websocket = True
        elif stream:
            handler.is_stream = True
        self.router.add(uri=uri, methods=methods, handler=handler,
                        host=host, strict_slashes=strict_slashes)
        return handler

    return response
def middleware(self, middleware_or_request):
    """Decorate and register middleware to be called before a request.
    Can either be called as @app.middleware or @app.middleware('request')
    """
    # Reconstructed from a corrupted duplicated row.
    def register_middleware(middleware, attach_to='request'):
        if attach_to == 'request':
            self.request_middleware.append(middleware)
        if attach_to == 'response':
            self.response_middleware.appendleft(middleware)
        return middleware

    # Detect which way this was called, @middleware or @middleware('AT')
    if callable(middleware_or_request):
        return register_middleware(middleware_or_request)
    else:
        return partial(register_middleware,
                       attach_to=middleware_or_request)
def static(self, uri, file_or_directory, pattern=r'/?.+',
           use_modified_since=True, use_content_range=False):
    """Register a root to serve files from.

    The input can either be a file or a directory.
    """
    static_register(self, uri, file_or_directory, pattern,
                    use_modified_since, use_content_range)
Register a root to serve files from. The input can either be a file or a directory. See
def blueprint(self, blueprint, **options):
    """Register a blueprint on the application.

    :param blueprint: Blueprint object
    :param options: option dictionary with blueprint defaults
    :return: Nothing
    """
    if blueprint.name in self.blueprints:
        # Re-registering the same object is allowed; a different object
        # with the same name is a programming error.
        assert self.blueprints[blueprint.name] is blueprint, \
            'A blueprint with the name "%s" is already registered.  ' \
            'Blueprint names must be unique.' % \
            (blueprint.name,)
    else:
        self.blueprints[blueprint.name] = blueprint
        self._blueprint_order.append(blueprint)
    blueprint.register(self, options)
Register a blueprint on the application. :param blueprint: Blueprint object :param options: option dictionary with blueprint defaults :return: Nothing
def i2osp(self, long_integer, block_size):
    """Convert a long integer into an octet string of *block_size* bytes.

    :raises ValueError: if the integer does not fit in *block_size* octets.
    """
    hex_string = '%X' % long_integer
    if len(hex_string) > 2 * block_size:
        raise ValueError('integer %i too large to encode in %i octets'
                         % (long_integer, block_size))
    # Left-pad with zero nibbles so the result is exactly block_size bytes.
    return a2b_hex(hex_string.zfill(2 * block_size))
Convert a long integer into an octet string.
def export_xlsx(wb, output, fn):
    """Finalize workbook *wb* and wrap buffer *output* in an Excel
    download response named *fn*."""
    wb.close()
    output.seek(0)
    payload = output.read()
    response = HttpResponse(payload,
                            content_type="application/vnd.ms-excel")
    disposition = codecs.encode('attachment;filename=%s' % fn, 'utf-8')
    response['Content-Disposition'] = disposition
    return response
Export the finalized workbook as an Excel HTTP response. wb: workbook to close output: in-memory buffer the workbook was written to fn: download file name
def xlsw_write_row(ws, row_idx, row, fmt=None):
    """Write *row* into worksheet *ws* at row *row_idx* with cell format
    *fmt*; return the index of the next row."""
    for col_idx, cell in enumerate(row):
        ws.write(row_idx, col_idx, cell, fmt)
    return row_idx + 1
ws: row_idx: row number row: a list, data to write fmt: format for cell
def simple_export2xlsx(filename, titles, qs, func_data):
    """Export a queryset to an .xlsx HTTP response.

    filename: base name for the worksheet and the download file
    titles: header-row labels
    qs: iterable of objects to export
    func_data: maps one object to a row list, e.g. ``lambda o: [o.pk, o.name]``
    """
    output = BytesIO()
    # NOTE(review): add_worksheet/add_format/set_bg_color below are the
    # xlsxwriter API, not xlwt's -- presumably `xlwt` is an alias for
    # xlsxwriter here; verify the module-level import.
    wb = xlwt.Workbook(output)
    ws = wb.add_worksheet(filename)
    header_fmt = wb.add_format()
    header_fmt.set_bg_color('#C4D89E')
    row_idx = 0
    row_idx = xlsw_write_row(ws, row_idx, titles, header_fmt)
    for o in qs:
        row_idx = xlsw_write_row(ws, row_idx, func_data(o))
    # Timestamp in the name keeps successive exports distinct.
    fn = '%s-%s.xlsx' % (filename, datetime.now())
    return export_xlsx(wb, output, fn)
export as excel filename: file name titles: title for this table qs: queryset to export func_data: a function to format object to list. ex: `lambda o: [o.pk, o.name]`
def add(self, iterable):
    """Feed one pattern (iterable) into the markov chain; the order of
    its elements shapes the chain's transitions."""
    prev_a = prev_b = MarkovChain.START
    for current in iterable:
        self[(prev_a, prev_b)].add_side(current)
        prev_a, prev_b = prev_b, current
    self[(prev_a, prev_b)].add_side(MarkovChain.END)
Insert an iterable (pattern) item into the markov chain. The order of the pattern will define more of the chain.
def random_output(self, max=100):
    """Walk the chain to produce a list of elements; *max* bounds the
    walk to prevent excessive iteration."""
    result = []
    prev_a = prev_b = MarkovChain.START
    for _ in range(max - 3):
        nxt = self[(prev_a, prev_b)].roll()
        if nxt is MarkovChain.END:
            break
        result.append(nxt)
        prev_a, prev_b = prev_b, nxt
    return result
Generate a list of elements from the markov chain. The `max` value is in place in order to prevent excessive iteration.
def start(self):
    """Start the periodic runner in a daemon thread.

    No-op when already running; clears the cease event so a stopped
    runner can be restarted.
    """
    if self._isRunning:
        return
    if self._cease.is_set():
        self._cease.clear()  # restart

    class Runner(threading.Thread):
        @classmethod
        def run(cls):
            # Busy-poll until the cease event fires, invoking the
            # callback each time the scheduled instant is reached.
            nextRunAt = cls.setNextRun()
            while not self._cease.is_set():
                if datetime.now() >= nextRunAt:
                    self._run()
                    nextRunAt = cls.setNextRun()

        @classmethod
        def setNextRun(cls):
            # return datetime.now() + timedelta(seconds=self._interval)
            # NOTE(review): _interval is expected to expose nextRunAt()
            # (an interval object, not a plain seconds count) -- confirm.
            return self._interval.nextRunAt()

    runner = Runner()
    runner.setDaemon(True)  # don't block interpreter exit
    runner.start()
    self._isRunning = True
Start the periodic runner
def useThis(self, *args, **kwargs):
    """Pre-bind *args/**kwargs onto the stored callback so they are used
    on every subsequent execution."""
    bound = functools.partial(self._callback, *args, **kwargs)
    self._callback = bound
Change parameter of the callback function. :param *args, **kwargs: parameter(s) to use when executing the callback function.
def stop(self):
    """Signal the periodic runner to stop and mark it as not running."""
    self._cease.set()
    # Short grace period so the worker thread can wind down cleanly.
    time.sleep(0.1)
    self._isRunning = False
Stop the periodic runner
def force_slash(fn):
    """Wrap a bottle route so requests without a trailing slash are
    redirected to the slash-terminated URL. Useful for the application
    root or where TrailingSlash doesn't work."""
    @wraps(fn)
    def wrapped(*args, **kwargs):
        path = request.environ['PATH_INFO']
        if path.endswith('/'):
            return fn(*args, **kwargs)
        redirect(request.environ['SCRIPT_NAME'] + path + '/')
    return wrapped
Force Slash ----------- Wrap a bottle route with this decorator to force a trailing slash. This is useful for the root of your application or places where TrailingSlash doesn't work
def condition(self) -> bool:
    """Validate the request's JWT; unless FAST_SESSIONS is enabled, also
    require a matching stored session. Installs the current session and
    returns True on success."""
    jwt = JWT()
    if jwt.verify_http_auth_token():
        # FAST_SESSIONS trusts the token alone and skips the
        # session-store lookup.
        if not current_app.config['AUTH']['FAST_SESSIONS']:
            session = SessionModel.where_session_id(
                jwt.data['session_id'])
            if session is None:
                return False
        Session.set_current_session(jwt.data['session_id'])
        return True
    return False
check JWT, then check session for validity
def render_hidden(name, value):
    """Render *value* as a hidden form input; a list renders as multiple
    hidden inputs."""
    widget = MultipleHiddenInput() if isinstance(value, list) \
        else HiddenInput()
    return widget.render(name, value)
render as hidden widget
def get_only_selected_choices(self, value):
    """Return only the choices whose values appear in *value*.

    For a ModelChoiceIterator the underlying queryset is narrowed by
    primary key; otherwise the choice list is filtered by string
    comparison of the choice values.
    """
    schoices = self.choices
    # Normalize the selected values to text, dropping falsy entries.
    selected_choices = set([force_text(v) for v in value if v])
    if isinstance(schoices, ModelChoiceIterator):
        schoices.queryset = schoices.queryset.filter(pk__in=selected_choices)
    else:
        schoices = [e for e in schoices if force_text(e) in selected_choices]
    return schoices
Return only the choices whose values appear among the selected values.
def next_task(self, item, raise_exceptions=None, **kwargs):
    """Deserialize all transactions for the batch behind *item* (a file
    path), then close the batch and archive the file.

    :raises TransactionsFileQueueError: wrapping any deserialization
        failure.
    """
    filename = os.path.basename(item)
    batch = self.get_batch(filename)
    tx_deserializer = self.tx_deserializer_cls(
        allow_self=self.allow_self, override_role=self.override_role
    )
    try:
        tx_deserializer.deserialize_transactions(
            transactions=batch.saved_transactions
        )
    except (DeserializationError, TransactionDeserializerError) as e:
        raise TransactionsFileQueueError(e) from e
    else:
        # Only close and archive once every transaction deserialized
        # cleanly; a failure leaves the file in the queue.
        batch.close()
        self.archive(filename)
Deserializes all transactions for this batch and archives the file.
def get_batch(self, filename=None):
    """Return a batch instance for the history record matching *filename*.

    :raises TransactionsFileQueueError: if no history exists for the
        filename, or if the batch was already consumed.
    """
    try:
        history = self.history_model.objects.get(filename=filename)
    except self.history_model.DoesNotExist as e:
        # Interpolate the offending filename (the previous message had
        # no placeholder and always printed a fixed string).
        raise TransactionsFileQueueError(
            f"Batch history not found for '{filename}'."
        ) from e
    if history.consumed:
        raise TransactionsFileQueueError(
            f"Batch closed for '{filename}'. Got consumed=True"
        )
    batch = self.batch_cls()
    batch.batch_id = history.batch_id
    batch.filename = history.filename
    return batch
Returns a batch instance given the filename.
def get_items(self, html):
    """Extract ``(href, name)`` pairs from a Zillow browse-lists page
    (state, county, zipcode, address listings).

    :raises exc.CaptchaError: when the page looks like a CAPTCHA wall.
    """
    # Bail out early if the response is a CAPTCHA challenge rather than
    # the listing page.
    captcha_patterns = [
        "https://www.google.com/recaptcha/api.js", "I'm not a robot"]
    for captcha_pattern in captcha_patterns:
        if captcha_pattern in html:
            raise exc.CaptchaError("Found %r in html!" % captcha_pattern)
    data = list()
    soup = self.to_soup(html)
    # NOTE(review): assumes listing links live under this exact zsg-*
    # layout class -- brittle against site redesigns; confirm still valid.
    div = soup.find("div", class_="zsg-lg-1-2 zsg-sm-1-1")
    for li in div.find_all("li"):
        a = li.find_all("a")[0]
        href = a["href"]
        name = a.text.strip()
        data.append((href, name))
    return data
Get state, county, zipcode, address code from lists page. Example: target url: http://www.zillow.com/browse/homes/md/ data: ``[(href, name), ...]``
def __copy_tree(src_dir, dest_dir):
    """Recursively copy *src_dir* into *dest_dir*, preserving stat info.

    Unlike shutil.copytree(), this tolerates an already-existing
    destination directory, so it can safely be invoked repeatedly over
    the same tree.
    """
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
    shutil.copystat(src_dir, dest_dir)
    for entry in os.listdir(src_dir):
        src_path = os.path.join(src_dir, entry)
        dst_path = os.path.join(dest_dir, entry)
        if os.path.isdir(src_path):
            __copy_tree(src_path, dst_path)
        else:
            shutil.copy2(src_path, dst_path)
The shutil.copytree() or distutils.dir_util.copy_tree() will happen to report error list below if we invoke it again and again ( at least in python 2.7.4 ): IOError: [Errno 2] No such file or directory: ... So we have to write our's copy_tree() for that purpose.
def get_time(self, loc4d=None):
    """Compute when this Diel migration occurs for the given Location4D.

    Cycle patterns take the location's sunrise/sunset shifted by
    ``time_delta`` hours; specific-time patterns re-date the stored time
    to the location's date.

    :raises ValueError: if *loc4d* is None.
    """
    if loc4d is None:
        raise ValueError("Location4D object can not be None")
    if self.pattern == self.PATTERN_CYCLE:
        c = SunCycles.cycles(loc=loc4d)
        if self.cycle == self.CYCLE_SUNRISE:
            r = c[SunCycles.RISING]
        elif self.cycle == self.CYCLE_SUNSET:
            r = c[SunCycles.SETTING]
        # NOTE(review): if self.cycle is neither constant, `r` is unbound
        # below and raises UnboundLocalError -- confirm cycle is always
        # one of the two values.
        td = timedelta(hours=self.time_delta)
        if self.plus_or_minus == self.HOURS_PLUS:
            r = r + td
        elif self.plus_or_minus == self.HOURS_MINUS:
            r = r - td
        return r
    elif self.pattern == self.PATTERN_SPECIFICTIME:
        # Keep the stored wall-clock time, but on the location's date.
        return self._time.replace(year=loc4d.time.year,
                                  month=loc4d.time.month,
                                  day=loc4d.time.day)
Based on a Location4D object and this Diel object, calculate the time at which this Diel migration is actually happening
def make_relationship_aggregate(self, relationship):
    """Create a new :class:`RelationshipAggregate` for *relationship*.

    When this session does not manage backreferences, the REVERSE
    direction flag is cleared on the relationship first.

    :param relationship: Instance of
      :class:`everest.entities.relationship.DomainRelationship`.
    """
    if not self._session.IS_MANAGING_BACKREFERENCES:
        relationship.direction = (relationship.direction
                                  & ~RELATIONSHIP_DIRECTIONS.REVERSE)
    return RelationshipAggregate(self, relationship)
Returns a new relationship aggregate for the given relationship. :param relationship: Instance of :class:`everest.entities.relationship.DomainRelationship`.
def get_request_body_chunk(self, content: bytes, closed: bool,
                           more_content: bool) -> Dict[str, Any]:
    """Build a Request Body Chunk message.

    http://channels.readthedocs.io/en/stable/asgi/www.html#request-body-chunk
    """
    return {
        'content': content,
        'closed': closed,
        'more_content': more_content,
    }
http://channels.readthedocs.io/en/stable/asgi/www.html#request-body-chunk
def snp_query(G, bim, Isnp):
    """Filter genotypes and variant annotation by the boolean mask *Isnp*.

    Parameters
    ----------
    G : (`n_snps`, `n_inds`) array
        Genetic data.
    bim : pandas.DataFrame
        Variant annotation with an ``i`` column of row indices into G.
    Isnp : bool array
        Variant filter aligned with *bim*.

    Returns
    -------
    G_out : filtered genetic data.
    bim_out : filtered variant annotation with ``i`` renumbered 0..n-1.
    """
    bim_out = bim[Isnp].reset_index(drop=True)
    G_out = G[bim_out.i.values]
    # Renumber the row-index column to match the filtered genotype matrix.
    # (range() replaces the former sp.arange: modern SciPy removed its
    # NumPy aliases, which also made the stray docstring residue fail.)
    bim_out.i = pd.Series(range(bim_out.shape[0]), index=bim_out.index)
    return G_out, bim_out
r""" Parameters ---------- G : (`n_snps`, `n_inds`) array Genetic data bim : pandas.DataFrame Variant annotation Isnp : bool array Variant filter Returns ------- G_out : (`n_snps`, `n_inds`) array filtered genetic data bim_out : dataframe filtered variant annotation
def is_in(bim, geno_range):
    """Boolean mask of variants in *bim* inside *geno_range*.

    Parameters
    ----------
    bim : pandas.DataFrame
        Variant annotation with ``chrom`` and ``pos`` columns.
    geno_range : tuple
        (chrom, pos_start, pos_end); start inclusive, end exclusive.

    Returns
    -------
    Isnp : bool array
        Variant filter.
    """
    # The stray bare `r` (docstring residue) was removed -- it raised
    # NameError at call time.
    chrom, pos_start, pos_end = geno_range
    same_chrom = bim.chrom == chrom
    after_start = bim.pos >= pos_start
    before_end = bim.pos < pos_end
    return same_chrom & after_start & before_end
r""" Parameters ---------- bim : pandas.DataFrame Variant annotation geno_range : tuple (chrom, pos_start, pos_end) Returns ------- Isnp : bool array Variant filter
def standardize_snps(G):
    """Standardize variants: mean-center each column of *G* and scale it
    to unit standard deviation.

    Parameters
    ----------
    G : (`n_inds`, `n_snps`) array
        Genetic data.

    Returns
    -------
    G_out : standardized array.

    Note: zero-variance columns divide by zero (NaN/inf), matching the
    original behavior. The stray bare `r` (docstring residue) that
    raised NameError was removed.
    """
    mean = G.mean(0)
    std = G.std(0)
    return (G - mean) / std
r""" Standardize variantes. Parameters ---------- G : (`n_inds`, `n_snps`) array Genetic data Returns ------- G_out : standardized array
def unique_variants(G):
    """Filter out variants (columns of *G*) with identical genetic
    profiles.

    A random projection hashes each column to a scalar; columns with
    equal projections are treated as duplicates. First occurrences are
    kept in their original column order.

    Parameters
    ----------
    G : (`n_inds`, `n_snps`) array
        Genetic data.

    Returns
    -------
    G_out : (`n_inds`, `n_unique_snps`) array
        Filtered genetic data.
    idxs : int array
        Indexes of the unique variants.
    """
    # Local import: modern SciPy removed the sp.rand/sp.unique/sp.sort
    # NumPy aliases the original relied on; the stray bare `r`
    # (docstring residue) was removed as well.
    import numpy as np
    projection = np.dot(np.random.rand(G.shape[0]), G)
    _, first_idx = np.unique(projection, return_index=True)
    keep = np.sort(first_idx)
    return G[:, keep], keep
r""" Filters out variants with the same genetic profile. Parameters ---------- G : (`n_inds`, `n_snps`) array Genetic data Returns ------- G_out : (`n_inds`, `n_unique_snps`) array filtered genetic data idxs : int array indexes of the the unique variants
def register_new(self, entity_class, entity):
    """Register *entity* under *entity_class* with status NEW.

    :raises ValueError: if the entity already holds state owned by
        another Unit Of Work.
    """
    EntityState.manage(entity, self)
    state = EntityState.get_state(entity)
    state.status = ENTITY_STATUS.NEW
    self.__entity_set_map[entity_class].add(entity)
Registers the given entity for the given class as NEW. :raises ValueError: If the given entity already holds state that was created by another Unit Of Work.
def register_clean(self, entity_class, entity):
    """Register *entity* under *entity_class* with status CLEAN."""
    EntityState.manage(entity, self)
    state = EntityState.get_state(entity)
    state.status = ENTITY_STATUS.CLEAN
    self.__entity_set_map[entity_class].add(entity)
Registers the given entity for the given class as CLEAN. :returns: Cloned entity.
def register_deleted(self, entity_class, entity):
    """Register *entity* under *entity_class* with status DELETED.

    :raises ValueError: if the entity already holds state owned by
        another Unit Of Work.
    """
    EntityState.manage(entity, self)
    state = EntityState.get_state(entity)
    state.status = ENTITY_STATUS.DELETED
    self.__entity_set_map[entity_class].add(entity)
Registers the given entity for the given class as DELETED. :raises ValueError: If the given entity already holds state that was created by another Unit Of Work.
def unregister(self, entity_class, entity):
    """Detach *entity* from this Unit Of Work and drop its state
    information."""
    EntityState.release(entity, self)
    self.__entity_set_map[entity_class].remove(entity)
Unregisters the given entity for the given class and discards its state information.
def is_marked_new(self, entity):
    """Check whether *entity* currently has status NEW.

    Entities without state information yield ``False``.
    """
    try:
        state = EntityState.get_state(entity)
    except ValueError:
        return False
    return state.status == ENTITY_STATUS.NEW
Checks if the given entity is marked with status NEW. Returns `False` if the entity has no state information.
def is_marked_deleted(self, entity):
    """Check whether *entity* currently has status DELETED.

    Entities without state information yield ``False``.
    """
    try:
        state = EntityState.get_state(entity)
    except ValueError:
        return False
    return state.status == ENTITY_STATUS.DELETED
Checks if the given entity is marked with status DELETED. Returns `False` if the entity has no state information.
def mark_clean(self, entity):
    """Flag *entity* as CLEAN and persisted.

    Used when an entity is loaded fresh from the repository or after a
    commit.
    """
    entity_state = EntityState.get_state(entity)
    entity_state.status = ENTITY_STATUS.CLEAN
    entity_state.is_persisted = True
Marks the given entity as CLEAN. This is done when an entity is loaded fresh from the repository or after a commit.
def iterator(self):
    """Iterate over the states of all entities held by this Unit Of
    Work.

    FIXME: there is no dependency tracking; objects come out in
    arbitrary order.
    """
    for entities in list(self.__entity_set_map.values()):
        for entity in entities:
            yield EntityState.get_state(entity)
Returns an iterator over all entity states held by this Unit Of Work.
def reset(self):
    """Release every entity held by this Unit Of Work (drop state
    information from all registered entities, then empty the map)."""
    for entities in self.__entity_set_map.values():
        for entity in entities:
            EntityState.release(entity, self)
    self.__entity_set_map.clear()
Releases all entities held by this Unit Of Work (i.e., removes state information from all registered entities and clears the entity map).
def add_field(self, name, default=None, required=False, error=None):
    """Register a text/non-file field to look up in the request.

    A ``None`` name is silently ignored.
    """
    if name is None:
        return
    spec = dict(name=name, default=default,
                required=required, error=error)
    self.field_arguments.append(spec)
Add a text/non-file field to parse for a value in the request
def add_file(self, name, required=False, error=None, extensions=None):
    """Register a file (upload) field to look up in the request.

    A ``None`` name is silently ignored.
    """
    if name is None:
        return
    spec = dict(name=name, required=required,
                error=error, extensions=extensions)
    self.file_arguments.append(spec)
Add a file field to parse on request (uploads)
def file_save(self, name, filename=None, folder="", keep_ext=True) -> bool:
    """Save an uploaded file from the parsed request to the app uploads
    folder.

    name: key into self.files
    filename: optional replacement base name
    folder: sub-folder under the configured uploads folder
    keep_ext: when renaming, keep the upload's original extension

    NOTE(review): despite the ``-> bool`` annotation this returns None on
    success and falls through (also None) when *name* is absent --
    callers cannot distinguish the cases; confirm intended contract.
    """
    if name in self.files:
        file_object = self.files[name]
        clean_filename = secure_filename(file_object.filename)
        if filename is not None and keep_ext:
            # Re-attach the sanitized upload's extension to the new name.
            clean_filename = filename + ".%s" % \
                (clean_filename.rsplit('.', 1)[1].lower())
        elif filename is not None and not keep_ext:
            clean_filename = filename
        file_object.save(os.path.join(
            current_app.config['UPLOADS']['FOLDER'],
            folder,
            clean_filename))
        return None
Easy save of a file
def parse(self, fail_callback=None):
    """Resolve all registered field and file arguments from the request.

    Invokes *fail_callback* (if given) before aborting when a required
    value or file is missing.
    """
    # Text fields first.
    for field in self.field_arguments:
        name = field['name']
        self.values[name] = self.__get_value(name)
        if self.values[name] is None and field['required']:
            if fail_callback is not None:
                fail_callback()
            self.__invalid_request(field['error'])
    # Then file uploads.
    for spec in self.file_arguments:
        name = spec['name']
        self.files[name] = self.__get_file(spec)
        if self.files[name] is None and spec['required']:
            if fail_callback is not None:
                fail_callback()
            self.__invalid_request(spec['error'])
Parse text fields and file fields for values and files
def __get_value(self, field_name):
    """Look up *field_name* in the request values, falling back to the
    parsed JSON form data; None when absent in both."""
    value = request.values.get(field_name)
    if value is not None:
        return value
    json_data = self.json_form_data
    if json_data is not None and field_name in json_data:
        return json_data[field_name]
    return None
Get request Json value by field name
def __get_file(self, file):
    """Fetch the uploaded file object described by spec *file*.

    NOTE(review): every branch below ultimately returns ``file_object``
    (the upload when present, else None) -- the extension check never
    actually rejects a file; confirm whether a disallowed extension
    should return None instead.
    """
    file_object = None
    if file['name'] in request.files:
        file_object = request.files[file['name']]
        clean_filename = secure_filename(file_object.filename)
        if clean_filename == '':
            return file_object
        if file_object and self.__allowed_extension(
                clean_filename, file['extensions']):
            return file_object
    elif file['name'] not in request.files and file['required']:
        return file_object
    return file_object
Get request file and do a security check
def __allowed_extension(self, filename, extensions):
    """True when *filename* has an extension from *extensions* (or from
    the app-wide UPLOADS.EXTENSIONS default)."""
    allowed = current_app.config['UPLOADS']['EXTENSIONS']
    if extensions is not None:
        allowed = extensions
    if '.' not in filename:
        return False
    return filename.rsplit('.', 1)[1].lower() in allowed
Check allowed file extensions
def __invalid_request(self, error):
    """Abort the request with a 400 JSON error payload."""
    # TODO: make this modifiable
    payload = {'error': {'message': error}}
    abort(JsonResponse(status_code=400, data=payload))
Error response on failure
def get_field(self, offset, length, format):
    """Unpack a single value from ``self.data`` at *offset*.

    Args:
        offset (int): offset into the byte array
        length (int): how many bytes to unpack
        format (str): Python struct format string
    """
    raw = self.data[offset:offset + length]
    (value,) = struct.unpack(format, raw)
    return value
Returns unpacked Python struct array. Args: offset (int): offset to byte array within structure length (int): how many bytes to unpack format (str): Python struct format string for unpacking See Also: https://docs.python.org/2/library/struct.html#format-characters
def get_string(self, offset, length):
    """Extract *length* raw bytes at *offset* from ``self.data`` as a
    byte string.

    Args:
        offset (int): string offset in the byte array
        length (int): string length
    """
    fmt = '%ds' % length
    return struct.unpack(fmt, self.data[offset:offset + length])[0]
Returns string (length bytes) Args: offset (int): sring offset in byte array length (int): string length