code
stringlengths
52
7.75k
docs
stringlengths
1
5.85k
def _checkMissingParams(self, template, **kwargs):
    """Check the missing parameters for rendering from the template file.

    :param template: the template file to inspect
    :param kwargs: the keyword substitutions the caller intends to use
    """
    needed = self.listFields(template)
    self._findMissingParams(needed, **kwargs)
Check the missing parameters for rendering from the template file
def _checkMissingParamsFromWorkitem(self, copied_from, keep=False, **kwargs):
    """Check the missing parameters for rendering directly from the
    copied workitem.

    :param copied_from: the to-be-copied workitem id
    :param keep: passed through to :meth:`listFieldsFromWorkitem`
    :param kwargs: the keyword substitutions the caller intends to use
    """
    needed = self.listFieldsFromWorkitem(copied_from, keep=keep)
    self._findMissingParams(needed, **kwargs)
Check the missing parameters for rendering directly from the copied workitem
def checkType(self, item_type, projectarea_id):
    """Check the validity of :class:`rtcclient.workitem.Workitem` type.

    :param item_type: the type of the workitem (e.g. Story/Defect/Epic)
    :param projectarea_id: the
        :class:`rtcclient.project_area.ProjectArea` id
    :return: `True` or `False`
    :rtype: bool
    """
    self.log.debug("Checking the validity of workitem type: %s",
                   item_type)
    try:
        project_area = self.getProjectAreaByID(projectarea_id)
        # bool() replaces the verbose if/else True/False branches
        return bool(project_area.getItemType(item_type))
    except (exception.NotFound, exception.BadValue):
        self.log.error("Invalid ProjectArea name")
        return False
Check the validity of :class:`rtcclient.workitem.Workitem` type :param item_type: the type of the workitem (e.g. Story/Defect/Epic) :param projectarea_id: the :class:`rtcclient.project_area.ProjectArea` id :return: `True` or `False` :rtype: bool
def _handle_resource_entry(self, resource_name, entry,
                           projectarea_url=None, archived=False,
                           filter_rule=None):
    """Turn one raw entry into a resource object, or return None when
    the entry is filtered out.

    :param resource_name: the resource class name to instantiate
    :param entry: the raw (dict-like) entry parsed from the response
    :param projectarea_url: when set, only keep entries belonging to
        this project area
    :param archived: only keep entries whose archived flag matches
    :param filter_rule: a list of (attribute, rdf_resource, value)
        rules; only an entry matching all the rules is kept
    """
    if projectarea_url is not None:
        try:
            if (entry.get("rtc_cm:projectArea")
                     .get("@rdf:resource")) != projectarea_url:
                return None
        except AttributeError:
            pass

    if filter_rule is not None:
        # the entry must match all the filter rules
        for fattr, rdf_resource, fvalue in filter_rule:
            try:
                if rdf_resource is not None:
                    frule_value = entry.get(fattr).get(rdf_resource)
                else:
                    frule_value = entry.get(fattr)
                if frule_value != fvalue:
                    return None
            except AttributeError:
                pass

    entry_archived = entry.get("rtc_cm:archived")
    # compare the "true"/"false" string directly instead of eval()-ing
    # server-supplied text
    if (entry_archived is not None and
            (entry_archived.lower() == "true") != archived):
        return None

    if resource_name == "Subscriber":
        resource_cls = Member
    elif resource_name in ["Query", "RunQuery", "Parent", "Children"]:
        resource_cls = Workitem
    else:
        # NOTE(review): eval() of an internally supplied class name;
        # resource_name never comes from user input here — confirm
        resource_cls = eval(resource_name)

    if resource_name in ["Workitem", "Query", "RunQuery",
                         "Parent", "Children"]:
        resource_url = entry.get("@rdf:resource")
        resource_url = "/".join([self.url, "oslc/workitems",
                                 resource_url.split("/")[-1]])
    else:
        resource_url = entry.get("@rdf:resource")

    return resource_cls(resource_url, self, raw_data=entry)
:param filter_rule: a list of filter rules e.g. filter_rule = [("dc:creator", "@rdf:resource", "https://test.url:9443/jts/users/me%40mail"), ("dc:modified", None, "2013-08-28T02:06:26.516Z") ] only an entry that matches all of the rules will be kept
def queryWorkitems(self, query_str, projectarea_id=None,
                   projectarea_name=None, returned_properties=None,
                   archived=False):
    """Query workitems with the query string in a certain project area.

    At least either of `projectarea_id` and `projectarea_name` is given.
    :param query_str: a valid query string
    :param projectarea_id: the
        :class:`rtcclient.project_area.ProjectArea` id
    :param projectarea_name: the project area name
    :param returned_properties: the returned properties that you want.
        Refer to :class:`rtcclient.client.RTCClient` for more
        explanations
    :param archived: (default is False) whether the workitems are
        archived
    :return: a :class:`list` that contains the queried
        :class:`rtcclient.workitem.Workitem` objects
    :rtype: list
    """
    # thin delegation to the underlying query object
    return self.query.queryWorkitems(
        query_str=query_str,
        projectarea_id=projectarea_id,
        projectarea_name=projectarea_name,
        returned_properties=returned_properties,
        archived=archived)
Query workitems with the query string in a certain project area At least either of `projectarea_id` and `projectarea_name` is given :param query_str: a valid query string :param projectarea_id: the :class:`rtcclient.project_area.ProjectArea` id :param projectarea_name: the project area name :param returned_properties: the returned properties that you want. Refer to :class:`rtcclient.client.RTCClient` for more explanations :param archived: (default is False) whether the workitems are archived :return: a :class:`list` that contains the queried :class:`rtcclient.workitem.Workitem` objects :rtype: list
def getCommentByID(self, comment_id):
    """Get the :class:`rtcclient.models.Comment` object by its id.

    Note: the comment id starts from 0.
    :param comment_id: the comment id (integer or equivalent string)
    :return: the :class:`rtcclient.models.Comment` object
    :rtype: rtcclient.models.Comment
    """
    # validate the comment id: bool (a subclass of int) and anything
    # that cannot be turned into an int are rejected
    try:
        if isinstance(comment_id, bool):
            raise ValueError()
        if isinstance(comment_id, six.string_types):
            comment_id = int(comment_id)
        if not isinstance(comment_id, int):
            raise ValueError()
    except (ValueError, TypeError):
        raise exception.BadValue("Please input valid comment id")

    comment_url = "/".join([self.url,
                            "rtc_cm:comments/%s" % comment_id])
    try:
        return Comment(comment_url, self.rtc_obj)
    except HTTPError:
        self.log.error("Comment %s does not exist", comment_id)
        raise exception.BadValue("Comment %s does not exist" % comment_id)
Get the :class:`rtcclient.models.Comment` object by its id Note: the comment id starts from 0 :param comment_id: the comment id (integer or equivalent string) :return: the :class:`rtcclient.models.Comment` object :rtype: rtcclient.models.Comment
def addSubscriber(self, email):
    """Add a subscriber to this workitem.

    If the subscriber has already been added, no more actions will be
    performed.
    :param email: the subscriber's email
    """
    headers, raw_data = self._perform_subscribe()
    existed_flag, raw_data = self._add_subscriber(email, raw_data)
    if existed_flag:
        # already subscribed: skip the server round trip
        return
    self._update_subscribe(headers, raw_data)
    self.log.info("Successfully add a subscriber: %s for <Workitem %s>",
                  email, self)
Add a subscriber to this workitem If the subscriber has already been added, no more actions will be performed. :param email: the subscriber's email
def addSubscribers(self, emails_list):
    """Add subscribers to this workitem.

    If all the subscribers have already been added, no more actions
    will be performed.
    :param emails_list: a :class:`list`/:class:`tuple`/:class:`set`
        containing the subscribers' emails
    """
    if not hasattr(emails_list, "__iter__"):
        error_msg = "Input parameter 'emails_list' is not iterable"
        self.log.error(error_msg)
        raise exception.BadValue(error_msg)

    # overall flag: True only when every email was already subscribed.
    # BUG FIX: it was initialized to False, so the and-fold below could
    # never become True and the "skip update" path was unreachable
    # (compare removeSubscribers, which starts at True).
    existed_flags = True
    headers, raw_data = self._perform_subscribe()
    for email in emails_list:
        existed_flag, raw_data = self._add_subscriber(email, raw_data)
        existed_flags = existed_flags and existed_flag

    if existed_flags:
        return
    self._update_subscribe(headers, raw_data)
    self.log.info("Successfully add subscribers: %s for <Workitem %s>",
                  emails_list, self)
Add subscribers to this workitem If the subscribers have already been added, no more actions will be performed. :param emails_list: a :class:`list`/:class:`tuple`/:class:`set` containing the subscribers' emails
def removeSubscriber(self, email):
    """Remove a subscriber from this workitem.

    If the subscriber has not been added, no more actions will be
    performed.
    :param email: the subscriber's email
    """
    headers, raw_data = self._perform_subscribe()
    missing_flag, raw_data = self._remove_subscriber(email, raw_data)
    if missing_flag:
        # nothing to remove: skip the server round trip
        return
    self._update_subscribe(headers, raw_data)
    self.log.info("Successfully remove a subscriber: %s for "
                  "<Workitem %s>", email, self)
Remove a subscriber from this workitem If the subscriber has not been added, no more actions will be performed. :param email: the subscriber's email
def removeSubscribers(self, emails_list):
    """Remove subscribers from this workitem.

    If the subscribers have not been added, no more actions will be
    performed.
    :param emails_list: a :class:`list`/:class:`tuple`/:class:`set`
        containing the subscribers' emails
    """
    if not hasattr(emails_list, "__iter__"):
        error_msg = "Input parameter 'emails_list' is not iterable"
        self.log.error(error_msg)
        raise exception.BadValue(error_msg)

    # overall flag: stays True only if every email was already absent
    missing_flags = True
    headers, raw_data = self._perform_subscribe()
    for email in emails_list:
        missing_flag, raw_data = self._remove_subscriber(email, raw_data)
        missing_flags = missing_flags and missing_flag

    if missing_flags:
        return
    self._update_subscribe(headers, raw_data)
    self.log.info("Successfully remove subscribers: %s for "
                  "<Workitem %s>", emails_list, self)
Remove subscribers from this workitem If the subscribers have not been added, no more actions will be performed. :param emails_list: a :class:`list`/:class:`tuple`/:class:`set` containing the subscribers' emails
def getAction(self, action_name):
    """Get the :class:`rtcclient.models.Action` object by its name.

    :param action_name: the name/title of the action
    :return: the :class:`rtcclient.models.Action` object
    :rtype: rtcclient.models.Action
    """
    self.log.debug("Try to get <Action %s>", action_name)
    if not isinstance(action_name, six.string_types) or not action_name:
        excp_msg = "Please specify a valid action name"
        self.log.error(excp_msg)
        raise exception.BadValue(excp_msg)

    actions = self._getActions(action_name=action_name)
    if actions is None:
        self.log.error("No Action named %s", action_name)
        raise exception.NotFound("No Action named %s" % action_name)

    action = actions[0]
    self.log.info("Find <Action %s>", action)
    return action
Get the :class:`rtcclient.models.Action` object by its name :param action_name: the name/title of the action :return: the :class:`rtcclient.models.Action` object :rtype: rtcclient.models.Action
def getStates(self):
    """Get all :class:`rtcclient.models.State` objects of this workitem.

    :return: a :class:`list` contains all the
        :class:`rtcclient.models.State` objects
    :rtype: list
    """
    # the state url's second-to-last segment identifies the state group
    state_url = self.raw_data.get("rtc_cm:state").get("@rdf:resource")
    cust_attr = state_url.split("/")[-2]
    return self.rtc_obj._get_paged_resources(
        "State",
        projectarea_id=self.contextId,
        customized_attr=cust_attr,
        page_size="50")
Get all :class:`rtcclient.models.State` objects of this workitem :return: a :class:`list` contains all the :class:`rtcclient.models.State` objects :rtype: list
def getIncludedInBuilds(self):
    """Get all :class:`rtcclient.models.IncludedInBuild` objects that
    have already included this workitem.

    WARNING: If one of the IncludedInBuilds is removed or cannot be
    retrieved/found correctly, then 404 error will be raised.
    :return: a :class:`list` contains all the
        :class:`rtcclient.models.IncludedInBuild` objects
    :rtype: list
    """
    build_tag = ("rtc_cm:com.ibm.team.build.linktype.includedWorkItems."
                 "com.ibm.team.build.common.link.includedInBuilds")
    return self.rtc_obj._get_paged_resources(
        "IncludedInBuild",
        workitem_id=self.identifier,
        customized_attr=build_tag,
        page_size="5")
Get all :class:`rtcclient.models.IncludedInBuild` objects that have already included this workitem WARNING: If one of the IncludedInBuilds is removed or cannot be retrieved/found correctly, then 404 error will be raised. :return: a :class:`list` contains all the :class:`rtcclient.models.IncludedInBuild` objects :rtype: list
def getParent(self, returned_properties=None):
    """Get the parent workitem of this workitem.

    If no parent, None will be returned.
    :param returned_properties: the returned properties that you want.
        Refer to :class:`rtcclient.client.RTCClient` for more
        explanations
    :return: a :class:`rtcclient.workitem.Workitem` object or None
    :rtype: rtcclient.workitem.Workitem
    """
    parent_tag = ("rtc_cm:com.ibm.team.workitem.linktype."
                  "parentworkitem.parent")
    parents = self.rtc_obj._get_paged_resources(
        "Parent",
        workitem_id=self.identifier,
        customized_attr=parent_tag,
        page_size="5",
        returned_properties=returned_properties)
    # a workitem has at most one parent
    return parents[0] if parents else None
Get the parent workitem of this workitem If no parent, None will be returned. :param returned_properties: the returned properties that you want. Refer to :class:`rtcclient.client.RTCClient` for more explanations :return: a :class:`rtcclient.workitem.Workitem` object :rtype: rtcclient.workitem.Workitem
def getChildren(self, returned_properties=None):
    """Get all the children workitems of this workitem.

    If no children, None will be returned.
    :param returned_properties: the returned properties that you want.
        Refer to :class:`rtcclient.client.RTCClient` for more
        explanations
    :return: a :class:`list` contains the children
        :class:`rtcclient.workitem.Workitem` objects
    :rtype: list
    """
    children_tag = ("rtc_cm:com.ibm.team.workitem.linktype."
                    "parentworkitem.children")
    return self.rtc_obj._get_paged_resources(
        "Children",
        workitem_id=self.identifier,
        customized_attr=children_tag,
        page_size="10",
        returned_properties=returned_properties)
Get all the children workitems of this workitem If no children, None will be returned. :param returned_properties: the returned properties that you want. Refer to :class:`rtcclient.client.RTCClient` for more explanations :return: a :class:`list` contains the children :class:`rtcclient.workitem.Workitem` objects :rtype: list
def getChangeSets(self):
    """Get all the ChangeSets of this workitem.

    :return: a :class:`list` contains all the
        :class:`rtcclient.models.ChangeSet` objects
    :rtype: list
    """
    changeset_tag = ("rtc_cm:com.ibm.team.filesystem.workitems."
                     "change_set.com.ibm.team.scm.ChangeSet")
    return self.rtc_obj._get_paged_resources(
        "ChangeSet",
        workitem_id=self.identifier,
        customized_attr=changeset_tag,
        page_size="10")
Get all the ChangeSets of this workitem :return: a :class:`list` contains all the :class:`rtcclient.models.ChangeSet` objects :rtype: list
def addParent(self, parent_id):
    """Add a parent to current workitem.

    Notice: for a certain workitem, no more than one parent workitem
    can be added and specified.
    :param parent_id: the parent workitem id/number (integer or
        equivalent string)
    """
    # validate the workitem id; BUG FIX: int("abc") previously raised a
    # bare ValueError instead of BadValue (now consistent with
    # getCommentByID's validation)
    try:
        if isinstance(parent_id, bool):
            raise ValueError()
        if isinstance(parent_id, six.string_types):
            parent_id = int(parent_id)
        if not isinstance(parent_id, int):
            raise ValueError()
    except (ValueError, TypeError):
        raise exception.BadValue("Please input a valid workitem id")

    self.log.debug("Try to add a parent <Workitem %s> to current "
                   "<Workitem %s>", parent_id, self)
    headers = copy.deepcopy(self.rtc_obj.headers)
    headers["Content-Type"] = self.OSLC_CR_JSON
    req_url = "".join([self.url,
                       "?oslc_cm.properties=com.ibm.team.workitem.",
                       "linktype.parentworkitem.parent"])
    parent_tag = ("rtc_cm:com.ibm.team.workitem.linktype."
                  "parentworkitem.parent")
    parent_url = ("{0}/resource/itemName/com.ibm.team."
                  "workitem.WorkItem/{1}".format(self.rtc_obj.url,
                                                 parent_id))
    parent_original = {parent_tag: [{"rdf:resource": parent_url}]}
    self.put(req_url,
             verify=False,
             proxies=self.rtc_obj.proxies,
             headers=headers,
             data=json.dumps(parent_original))
    self.log.info("Successfully add a parent <Workitem %s> to current "
                  "<Workitem %s>", parent_id, self)
Add a parent to current workitem Notice: for a certain workitem, no more than one parent workitem can be added and specified :param parent_id: the parent workitem id/number (integer or equivalent string)
def addChild(self, child_id):
    """Add a child to current workitem.

    :param child_id: the child workitem id/number (integer or
        equivalent string)
    """
    self.log.debug("Try to add a child <Workitem %s> to current "
                   "<Workitem %s>", child_id, self)
    # delegate to the batch helper with a single-element list
    self._addChildren([child_id])
    self.log.info("Successfully add a child <Workitem %s> to current "
                  "<Workitem %s>", child_id, self)
Add a child to current workitem :param child_id: the child workitem id/number (integer or equivalent string)
def addChildren(self, child_ids):
    """Add children to current workitem.

    :param child_ids: a :class:`list` contains the children workitem
        id/number (integer or equivalent string)
    """
    if not hasattr(child_ids, "__iter__"):
        error_msg = "Input parameter 'child_ids' is not iterable"
        self.log.error(error_msg)
        raise exception.BadValue(error_msg)

    self.log.debug("Try to add children <Workitem %s> to current "
                   "<Workitem %s>", child_ids, self)
    self._addChildren(child_ids)
    self.log.info("Successfully add children <Workitem %s> to current "
                  "<Workitem %s>", child_ids, self)
Add children to current workitem :param child_ids: a :class:`list` contains the children workitem id/number (integer or equivalent string)
def removeParent(self):
    """Remove the parent workitem from current workitem.

    Notice: for a certain workitem, no more than one parent workitem
    can be added and specified.
    """
    self.log.debug("Try to remove the parent workitem from current "
                   "<Workitem %s>", self)
    headers = copy.deepcopy(self.rtc_obj.headers)
    headers["Content-Type"] = self.OSLC_CR_JSON
    req_url = "".join([self.url,
                       "?oslc_cm.properties=com.ibm.team.workitem.",
                       "linktype.parentworkitem.parent"])
    parent_tag = ("rtc_cm:com.ibm.team.workitem.linktype."
                  "parentworkitem.parent")
    # an empty list clears the (single) parent link
    payload = {parent_tag: []}
    self.put(req_url,
             verify=False,
             proxies=self.rtc_obj.proxies,
             headers=headers,
             data=json.dumps(payload))
    self.log.info("Successfully remove the parent workitem of current "
                  "<Workitem %s>", self)
Remove the parent workitem from current workitem Notice: for a certain workitem, no more than one parent workitem can be added and specified
def removeChild(self, child_id):
    """Remove a child from current workitem.

    :param child_id: the child workitem id/number (integer or
        equivalent string)
    """
    self.log.debug("Try to remove a child <Workitem %s> from current "
                   "<Workitem %s>", child_id, self)
    # delegate to the batch helper with a single-element list
    self._removeChildren([child_id])
    self.log.info("Successfully remove a child <Workitem %s> from "
                  "current <Workitem %s>", child_id, self)
Remove a child from current workitem :param child_id: the child workitem id/number (integer or equivalent string)
def removeChildren(self, child_ids):
    """Remove children from current workitem.

    :param child_ids: a :class:`list` contains the children workitem
        id/number (integer or equivalent string)
    """
    if not hasattr(child_ids, "__iter__"):
        error_msg = "Input parameter 'child_ids' is not iterable"
        self.log.error(error_msg)
        raise exception.BadValue(error_msg)

    self.log.debug("Try to remove children <Workitem %s> from current "
                   "<Workitem %s>", child_ids, self)
    self._removeChildren(child_ids)
    self.log.info("Successfully remove children <Workitem %s> from "
                  "current <Workitem %s>", child_ids, self)
Remove children from current workitem :param child_ids: a :class:`list` contains the children workitem id/number (integer or equivalent string)
def addAttachment(self, filepath):
    """Upload attachment to a workitem.

    :param filepath: the attachment file path
    :return: the :class:`rtcclient.models.Attachment` object
    :rtype: rtcclient.models.Attachment
    """
    proj_id = self.contextId
    fa = self.rtc_obj.getFiledAgainst(self.filedAgainst,
                                      projectarea_id=proj_id)
    fa_id = fa.url.split("/")[-1]

    headers = copy.deepcopy(self.rtc_obj.headers)
    # drop Content-Type so the multipart boundary is set by the request
    # layer (pop replaces the dunder __contains__/__delitem__ calls)
    headers.pop("Content-Type", None)

    filename = os.path.basename(filepath)
    params = {"projectId": proj_id,
              "multiple": "true",
              "category": fa_id}
    req_url = "".join([self.rtc_obj.url,
                       "/service/com.ibm.team.workitem.service.",
                       "internal.rest.IAttachmentRestService/"])
    # BUG FIX: the file handle was never closed; close it
    # deterministically once the upload finishes
    with open(filepath, "rb") as fileh:
        files = {"attach": (filename, fileh, "application/octet-stream")}
        resp = self.post(req_url,
                         verify=False,
                         headers=headers,
                         proxies=self.rtc_obj.proxies,
                         params=params,
                         files=files)

    raw_data = xmltodict.parse(resp.content)
    json_body = json.loads(raw_data["html"]["body"]["textarea"])
    attachment_info = json_body["files"][0]
    return self._add_attachment_link(attachment_info)
Upload attachment to a workitem :param filepath: the attachment file path :return: the :class:`rtcclient.models.Attachment` object :rtype: rtcclient.models.Attachment
def getAttachments(self):
    """Get all :class:`rtcclient.models.Attachment` objects of this
    workitem.

    :return: a :class:`list` contains all the
        :class:`rtcclient.models.Attachment` objects
    :rtype: list
    """
    attachment_tag = ("rtc_cm:com.ibm.team.workitem.linktype."
                      "attachment.attachment")
    return self.rtc_obj._get_paged_resources(
        "Attachment",
        workitem_id=self.identifier,
        customized_attr=attachment_tag,
        page_size="10")
Get all :class:`rtcclient.models.Attachment` objects of this workitem :return: a :class:`list` contains all the :class:`rtcclient.models.Attachment` objects :rtype: list
def list_containers(active=True, defined=True,
                    as_object=False, config_path=None):
    """List the containers on the system."""
    kwargs = {"active": active, "defined": defined}
    if config_path:
        # a missing config path means there is nothing to list
        if not os.path.exists(config_path):
            return tuple()
        kwargs["config_path"] = config_path

    try:
        entries = _lxc.list_containers(**kwargs)
    except ValueError:
        return tuple()

    if as_object:
        return tuple(Container(name, config_path) for name in entries)
    return entries
List the containers on the system.
def attach_run_command(cmd):
    """Run a command when attaching.

    Please do not call directly, this will execvp the command.
    This is to be used in conjunction with the attach method of a
    container.
    """
    # normalize the three accepted shapes into the (path, argv) tuple
    # that _lxc expects
    if isinstance(cmd, tuple):
        payload = cmd
    elif isinstance(cmd, list):
        payload = (cmd[0], cmd)
    else:
        payload = (cmd, [cmd])
    return _lxc.attach_run_command(payload)
Run a command when attaching Please do not call directly, this will execvp the command. This is to be used in conjunction with the attach method of a container.
def arch_to_personality(arch):
    """Determine the process personality corresponding to the
    architecture."""
    # NOTE(review): `unicode` implies python2 compatibility — confirm
    decoded = unicode(arch) if isinstance(arch, bytes) else arch
    return _lxc.arch_to_personality(decoded)
Determine the process personality corresponding to the architecture
def add_device_net(self, name, destname=None):
    """Add network device to running container.

    :param name: the host-side interface name
    :param destname: optional name for the interface inside the
        container (defaults to `name`)
    :return: True on success, False on failure
    """
    if not self.running:
        return False

    # wireless interfaces: the phy (not the netdev) must be moved into
    # the container's network namespace via `iw`
    if os.path.exists("/sys/class/net/%s/phy80211/name" % name):
        with open("/sys/class/net/%s/phy80211/name" % name) as fd:
            phy = fd.read().strip()

        if subprocess.call(['iw', 'phy', phy, 'set', 'netns',
                            str(self.init_pid)]) != 0:
            return False

        if destname:
            def rename_interface(args):
                old, new = args
                return subprocess.call(['ip', 'link', 'set',
                                        'dev', old, 'name', new])

            # attach inside the container to rename the moved interface
            # NOTE(review): (CLONE_NEWNET) is not a tuple — confirm
            # attach_wait accepts a bare flag here
            return self.attach_wait(rename_interface, (name, destname),
                                    namespaces=(CLONE_NEWNET)) == 0

        return True

    if not destname:
        destname = name

    if not os.path.exists("/sys/class/net/%s/" % name):
        return False

    # wired interfaces: move and rename in a single `ip link set`
    return subprocess.call(['ip', 'link', 'set',
                            'dev', name,
                            'netns', str(self.init_pid),
                            'name', destname]) == 0
Add network device to running container.
def append_config_item(self, key, value):
    """Append 'value' to 'key', assuming 'key' is a list.

    If 'key' isn't a list, 'value' will be set as the value of 'key'.
    """
    return _lxc.Container.set_config_item(self, key, value)
Append 'value' to 'key', assuming 'key' is a list. If 'key' isn't a list, 'value' will be set as the value of 'key'.
def create(self, template=None, flags=0, args=()):
    """Create a new rootfs for the container.

    "template" if passed must be a valid template name.
    "flags" (optional) is an integer representing the optional create
    flags to be passed.
    "args" (optional) is a tuple of arguments to pass to the template.
    It can also be provided as a dict.
    """
    # a dict of template options becomes a flat --key value argv list
    if isinstance(args, dict):
        template_args = []
        for opt, val in args.items():
            template_args.append("--%s" % opt)
            template_args.append("%s" % val)
    else:
        template_args = args

    kwargs = {"flags": flags, "args": tuple(template_args)}
    if template:
        kwargs["template"] = template
    return _lxc.Container.create(self, **kwargs)
Create a new rootfs for the container. "template" if passed must be a valid template name. "flags" (optional) is an integer representing the optional create flags to be passed. "args" (optional) is a tuple of arguments to pass to the template. It can also be provided as a dict.
def clone(self, newname, config_path=None, flags=0, bdevtype=None,
          bdevdata=None, newsize=0, hookargs=()):
    """Clone the current container.

    On success, returns the new Container object; on failure, False.
    """
    args = {'newname': newname,
            'flags': flags,
            'newsize': newsize,
            'hookargs': hookargs}
    # only pass the optional keys that were actually provided
    for optional, val in (('config_path', config_path),
                          ('bdevtype', bdevtype),
                          ('bdevdata', bdevdata)):
        if val:
            args[optional] = val

    if not _lxc.Container.clone(self, **args):
        return False
    return Container(newname, config_path=config_path)
Clone the current container.
def console(self, ttynum=-1, stdinfd=0, stdoutfd=1, stderrfd=2,
            escape=1):
    """Attach to console of running container."""
    # only a running container has a console to attach to
    if not self.running:
        return False
    return _lxc.Container.console(self, ttynum, stdinfd, stdoutfd,
                                  stderrfd, escape)
Attach to console of running container.
def console_getfd(self, ttynum=-1):
    """Attach to console of running container."""
    # only a running container has a console to attach to
    if not self.running:
        return False
    return _lxc.Container.console_getfd(self, ttynum)
Attach to console of running container.
def get_cgroup_item(self, key):
    """Returns the value for a given cgroup entry.

    A list is returned when multiple values are set.
    """
    value = _lxc.Container.get_cgroup_item(self, key)
    # False signals a missing entry and must not be stripped
    return False if value is False else value.rstrip("\n")
Returns the value for a given cgroup entry. A list is returned when multiple values are set.
def get_config_item(self, key):
    """Returns the value for a given config key.

    A list is returned when multiple values are set.
    """
    value = _lxc.Container.get_config_item(self, key)
    if value is False:
        return False
    # a trailing newline marks a multi-value key
    if value.endswith("\n"):
        return value.rstrip("\n").split("\n")
    return value
Returns the value for a given config key. A list is returned when multiple values are set.
def get_keys(self, key=None):
    """Returns a list of valid sub-keys."""
    if key:
        value = _lxc.Container.get_keys(self, key)
    else:
        value = _lxc.Container.get_keys(self)

    if value is False:
        return False
    # a trailing newline marks a multi-value result
    if value.endswith("\n"):
        return value.rstrip("\n").split("\n")
    return value
Returns a list of valid sub-keys.
def get_ips(self, interface=None, family=None, scope=None, timeout=0):
    """Get a tuple of IPs for the container."""
    kwargs = {}
    for key, val in (('interface', interface),
                     ('family', family),
                     ('scope', scope)):
        if val:
            kwargs[key] = val

    # the environment variable overrides the timeout argument
    timeout = int(os.environ.get('LXC_GETIP_TIMEOUT', timeout))
    ips = None
    while not ips:
        ips = _lxc.Container.get_ips(self, **kwargs)
        if timeout == 0:
            break
        timeout -= 1
        time.sleep(1)
    return ips
Get a tuple of IPs for the container.
def rename(self, new_name):
    """Rename the container.

    On success, returns the new Container object.
    On failure, returns False.
    """
    if not _lxc.Container.rename(self, new_name):
        return False
    return Container(new_name)
Rename the container. On success, returns the new Container object. On failure, returns False.
def set_config_item(self, key, value):
    """Set a config key to a provided value.

    The value can be a list for the keys supporting multiple values.
    Returns True on success; on failure the previous value (when
    available) is restored and False is returned.
    """
    # remember the previous value so it can be restored on failure
    try:
        old_value = self.get_config_item(key)
    except KeyError:
        old_value = None

    # Get everything to unicode with python2
    if isinstance(value, str):
        value = value.decode()
    elif isinstance(value, list):
        for i in range(len(value)):
            if isinstance(value[i], str):
                value[i] = value[i].decode()

    # Check if it's a list
    def set_key(key, value):
        self.clear_config_item(key)
        if isinstance(value, list):
            for entry in value:
                if not _lxc.Container.set_config_item(self, key, entry):
                    return False
        else:
            _lxc.Container.set_config_item(self, key, value)

    set_key(key, value)
    # read the value back to confirm the set actually took effect
    new_value = self.get_config_item(key)

    # loglevel is special and won't match the string we set
    if key == "lxc.loglevel":
        new_value = value

    if (isinstance(value, unicode) and isinstance(new_value, unicode) and
            value == new_value):
        return True
    elif (isinstance(value, list) and isinstance(new_value, list) and
            set(value) == set(new_value)):
        return True
    elif (isinstance(value, unicode) and isinstance(new_value, list) and
            set([value]) == set(new_value)):
        return True
    elif old_value:
        # verification failed: roll back to the previous value
        set_key(key, old_value)
        return False
    else:
        # there was no previous value: clear the key entirely
        self.clear_config_item(key)
        return False
Set a config key to a provided value. The value can be a list for the keys supporting multiple values.
def wait(self, state, timeout=-1):
    """Wait for the container to reach a given state or timeout."""
    # state names are matched upper-case by the backend
    target = state.upper() if isinstance(state, str) else state
    return _lxc.Container.wait(self, target, timeout)
Wait for the container to reach a given state or timeout.
def render(self, template, **kwargs):
    """Renders the template.

    :param template: the template file to render, usually generated by
        :class:`rtcclient.template.Templater.getTemplate` and possibly
        modified by the user
    :param kwargs: the substitutions used to fill the template;
        `description` and `title` are mandatory, other fields depend on
        the template
    :return: the rendered :class:`string` object
    :rtype: string
    """
    try:
        loaded = self.environment.get_template(template)
        return loaded.render(**kwargs)
    except AttributeError:
        # an unusable environment/template surfaces as AttributeError
        err_msg = "Invalid value for 'template'"
        self.log.error(err_msg)
        raise exception.BadValue(err_msg)
Renders the template :param template: The template to render. The template is actually a file, which is usually generated by :class:`rtcclient.template.Templater.getTemplate` and can also be modified by user accordingly. :param kwargs: The `kwargs` dict is used to fill the template. These two parameter are mandatory: * description * title Some of below parameters (which may not be included in some customized workitem type ) are mandatory if `keep` (parameter in :class:`rtcclient.template.Templater.getTemplate`) is set to `False`; Optional for otherwise. * teamArea (Team Area) * ownedBy (Owned By) * plannedFor(Planned For) * severity(Severity) * priority(Priority) * filedAgainst(Filed Against) Actually all these needed keywords/attributes/fields can be retrieved by :class:`rtcclient.template.Templater.listFields` :return: the :class:`string` object :rtype: string
def renderFromWorkitem(self, copied_from, keep=False,
                       encoding="UTF-8", **kwargs):
    """Render the template directly from a to-be-copied
    :class:`rtcclient.workitem.Workitem` without saving to a file.

    :param copied_from: the to-be-copied workitem id
    :param keep: (default is False) when `True`, some fields remain
        unchanged from the to-be-copied workitem
    :param encoding: (default is "UTF-8") coding format
    :param kwargs: the substitutions used to fill the template
    :return: the rendered :class:`string` object
    :rtype: string
    """
    source = self.getTemplate(copied_from,
                              template_name=None,
                              template_folder=None,
                              keep=keep,
                              encoding=encoding)
    return jinja2.Template(source).render(**kwargs)
Render the template directly from some to-be-copied :class:`rtcclient.workitem.Workitem` without saving to a file :param copied_from: the to-be-copied :class:`rtcclient.workitem.Workitem` id :param keep (default is False): If `True`, some of the below fields will remain unchangeable with the to-be-copied :class:`rtcclient.workitem.Workitem`. Otherwise for `False`. * teamArea (Team Area) * ownedBy (Owned By) * plannedFor(Planned For) * severity(Severity) * priority(Priority) * filedAgainst(Filed Against) :param encoding (default is "UTF-8"): coding format :param kwargs: The `kwargs` dict is used to fill the template. These two parameter are mandatory: * description * title Some of below parameters (which may not be included in some customized workitem type ) are mandatory if `keep` is set to `False`; Optional for otherwise. * teamArea (Team Area) * ownedBy (Owned By) * plannedFor(Planned For) * severity(Severity) * priority(Priority) * filedAgainst(Filed Against) Actually all these needed keywords/attributes/fields can be retrieved by :class:`rtcclient.template.Templater.listFieldsFromWorkitem` :return: the :class:`string` object :rtype: string
def listFields(self, template):
    """List all the attributes to be rendered from the template file.

    :param template: the template file, usually generated by
        :class:`rtcclient.template.Templater.getTemplate`
    :return: a :class:`set` contains all the needed attributes
    :rtype: set
    """
    try:
        source = self.environment.loader.get_source(self.environment,
                                                    template)
        return self.listFieldsFromSource(source)
    except AttributeError:
        # an unusable environment/loader surfaces as AttributeError
        err_msg = "Invalid value for 'template'"
        self.log.error(err_msg)
        raise exception.BadValue(err_msg)
List all the attributes to be rendered from the template file :param template: The template to render. The template is actually a file, which is usually generated by :class:`rtcclient.template.Templater.getTemplate` and can also be modified by user accordingly. :return: a :class:`set` contains all the needed attributes :rtype: set
def listFieldsFromWorkitem(self, copied_from, keep=False):
    """List all the attributes to be rendered directly from a
    to-be-copied :class:`rtcclient.workitem.Workitem`.

    :param copied_from: the to-be-copied workitem id
    :param keep: (default is False) when `True`, some fields remain
        unchanged from the to-be-copied workitem
    :return: a :class:`set` contains all the needed attributes
    :rtype: set
    """
    source = self.getTemplate(copied_from,
                              template_name=None,
                              template_folder=None,
                              keep=keep)
    return self.listFieldsFromSource(source)
List all the attributes to be rendered directly from some to-be-copied :class:`rtcclient.workitem.Workitem` :param copied_from: the to-be-copied :class:`rtcclient.workitem.Workitem` id :param keep: (default is False) If `True`, some of below parameters (which will not be included in some customized :class:`rtcclient.workitem.Workitem` type ) will remain unchangeable with the to-be-copied :class:`rtcclient.workitem.Workitem`. Otherwise for `False`. * teamArea (Team Area) * ownedBy (Owned By) * plannedFor(Planned For) * severity(Severity) * priority(Priority) * filedAgainst(Filed Against) :return: a :class:`set` contains all the needed attributes :rtype: set
def listFieldsFromSource(self, template_source):
    """List all the attributes to be rendered directly from template
    source.

    :param template_source: the template source (usually the template
        content in string format)
    :return: a :class:`set` contains all the needed attributes
    :rtype: set
    """
    parsed = self.environment.parse(template_source)
    return jinja2.meta.find_undeclared_variables(parsed)
List all the attributes to be rendered directly from template source :param template_source: the template source (usually represents the template content in string format) :return: a :class:`set` contains all the needed attributes :rtype: set
def _remove_long_fields(self, wk_raw_data): match_str_list = ["rtc_cm:com.ibm.", "calm:"] for key in wk_raw_data.keys(): for match_str in match_str_list: if key.startswith(match_str): try: wk_raw_data.pop(key) self.log.debug("Successfully remove field [%s] from " "the template", key) except: self.log.warning("Cannot remove field [%s] from the " "template", key) continue
Remove long fields: these fields can only be customized after the workitems are created
def getChanges(self):
    """Get all :class:`rtcclient.models.Change` objects in this
    changeset.

    :return: a :class:`list` contains all the
        :class:`rtcclient.models.Change` objects
    :rtype: list
    """
    identifier = self.url.split("/")[-1]
    resource_url = "/".join(["%s" % self.rtc_obj.url,
                             "resource/itemOid",
                             "com.ibm.team.scm.ChangeSet",
                             "%s?_mediaType=text/xml" % identifier])
    resp = self.get(resource_url,
                    verify=False,
                    proxies=self.rtc_obj.proxies,
                    headers=self.rtc_obj.headers)
    raw_data = xmltodict.parse(resp.content).get("scm:ChangeSet")
    changes = raw_data.get("changes")
    # everything except "@..." metadata and the changes themselves is
    # shared by all changes in this changeset
    common_changes = {key: value
                      for key, value in raw_data.items()
                      if not key.startswith("@") and key != "changes"}
    return self._handle_changes(changes, common_changes)
Get all :class:`rtcclient.models.Change` objects in this changeset :return: a :class:`list` contains all the :class:`rtcclient.models.Change` objects :rtype: list
def fetchBeforeStateFile(self, file_folder):
    """Fetch the initial file (before the change) to a folder.

    If the file is newly added, then `None` will be returned.
    :param file_folder: the folder to store the file
    :return: the :class:`string` object
    :rtype: string
    """
    if self.before == u"true":
        # "true" marks a newly-added file: there is no previous state
        self.log.info("This file is newly added. No previous file")
        return None
    self.log.info("Fetching initial file of this Change<%s>:" % self)
    return self._fetchFile(self.before, file_folder, override=False)
Fetch the initial file (before the change) to a folder If the file is newly added, then `None` will be returned. :param file_folder: the folder to store the file :return: the :class:`string` object :rtype: string
def fetchAfterStateFile(self, file_folder):
    """Fetch the final file (after the change) to a folder.

    If the file has been deleted, then `None` will be returned.
    :param file_folder: the folder to store the file
    :return: the :class:`string` object
    :rtype: string
    """
    if self.after == u"true":
        # "true" marks a deleted file: there is no final state
        self.log.info("This file has been deleted successfully.")
        return None
    self.log.info("Fetching final file of this Change<%s>:" % self)
    return self._fetchFile(self.after, file_folder)
Fetch the final file (after the change) to a folder If the file has been deleted, then `None` will be returned. :param file_folder: the folder to store the file :return: the :class:`string` object :rtype: string
def table_dump(self, table):
    """dump all the rows of the given table name"""
    if not table:
        raise ValueError("no table")

    print('------- dumping table {}'.format(table))
    outfile_path = self._get_outfile_path(table)
    dump_args = self._get_args(
        "pg_dump",
        "--table={}".format(table),
        #"--data-only",
        "--clean",
        "--no-owner",
        "--column-inserts",
    )
    # pipe pg_dump through gzip and redirect into the output file
    cmd = "{} | gzip > {}".format(' '.join(dump_args), outfile_path)
    self._run_cmd(cmd)
    print('------- dumped table {}'.format(table))
    return True
dump all the rows of the given table name
def _get_file(self): ''' return an opened tempfile pointer that can be used http://docs.python.org/2/library/tempfile.html ''' f = tempfile.NamedTemporaryFile(delete=False) self.tmp_files.add(f.name) return f _get_file(self): ''' return an opened tempfile pointer that can be used http://docs.python.org/2/library/tempfile.html ''' f = tempfile.NamedTemporaryFile(delete=False) self.tmp_files.add(f.name) return f
return an opened tempfile pointer that can be used http://docs.python.org/2/library/tempfile.html
def _get_args(self, executable, *args): args = list(args) args.insert(0, executable) if self.username: args.append("--username={}".format(self.username)) if self.host: args.append("--host={}".format(self.host)) if self.port: args.append("--port={}".format(self.port)) args.append(self.dbname) #args.extend(other_args) return args
compile all the executable and the arguments, combining with common arguments to create a full batch of command args
def _get_outfile_path(self, table): self.outfile_count += 1 outfile = os.path.join(self.directory, '{:03d}_{}.sql.gz'.format(self.outfile_count, table)) return outfile
return the path for a file we can use to back up the table
def _run_queries(self, queries, *args, **kwargs):
    """run the queries

    queries -- list -- the queries to run
    return -- string -- the results of the query?
    """
    # write out all the statements to a temp file and have psql run it
    f = self._get_file()
    f.write("".join("{};\n".format(q) for q in queries))
    f.close()
    psql_args = self._get_args('psql', '-X', '-f {}'.format(f.name))
    return self._run_cmd(' '.join(psql_args), *args, **kwargs)
run the queries queries -- list -- the queries to run return -- string -- the results of the query?
def _restore_auto_increment(self, table):
    """restore the auto increment value for the table to what it was
    previously"""
    info = self._get_auto_increment_info(table)
    query, seq_table, seq_column, seq_name = info
    if not query:
        # no owned sequence: nothing to restore
        return None
    queries = [query, "select nextval('{}')".format(seq_name)]
    return self._run_queries(queries)
restore the auto increment value for the table to what it was previously
def _get_auto_increment_info(self, table):
    """Figure out the auto increment (sequence) info for the given table.

    Queries the pg catalogs for the sequence attached to *table* and
    builds a setval() statement that restores the sequence to
    max(column) after a restore.

    return -- tuple -- (setval_query, seq_table, seq_column, seq_name);
        all empty strings when the table has no sequence
    """
    query = ''
    seq_table = ''
    seq_column = ''
    seq_name = ''
    find_query = "\n".join([
        "SELECT",
        "  t.relname as related_table,",
        "  a.attname as related_column,",
        "  s.relname as sequence_name",
        "FROM pg_class s",
        "JOIN pg_depend d ON d.objid = s.oid",
        "JOIN pg_class t ON d.objid = s.oid AND d.refobjid = t.oid",
        "JOIN pg_attribute a ON (d.refobjid, d.refobjsubid) = (a.attrelid, a.attnum)",
        "JOIN pg_namespace n ON n.oid = s.relnamespace",
        "WHERE",
        "  s.relkind = 'S'",
        "AND",
        "  n.nspname = 'public'",
        "AND",
        "  t.relname = '{}'".format(table)
    ])
    pipe = self._run_queries([find_query], popen_kwargs={'stdout': subprocess.PIPE})
    stdout, stderr = pipe.communicate()
    if stdout:
        try:
            # parse psql's "a | b | c" table output; m[0] is the header
            # row, m[1] the first data row -- hence the [1] index below
            m = re.findall('^\s*(\S+)\s*\|\s*(\S+)\s*\|\s*(\S+)\s*$', stdout, flags=re.MULTILINE)
            seq_table, seq_column, seq_name = m[1]
            # http://www.postgresql.org/docs/9.2/static/functions-sequence.html
            # http://www.postgresql.org/docs/9.2/static/functions-conditional.html
            query = "\n".join([
                "SELECT",
                "  setval('{}',".format(seq_name.strip()),
                "  coalesce(max({}), 1),".format(seq_column.strip()),
                "  max({}) IS NOT null)".format(seq_column.strip()),
                "FROM \"{}\"".format(seq_table.strip())
            ])
        except IndexError:
            # no data row matched -> the table has no sequence
            query = ''
    return query, seq_table, seq_column, seq_name
Figure out the auto increment (sequence) value for the given table
def restore(self):
    """Use self.directory to restore a db.

    NOTE -- this will only restore a database dumped with one of the
    methods of this class.
    """
    sql_files = []
    for root, dirs, files in os.walk(self.directory):
        for f in files:
            if f.endswith(".sql.gz"):
                path = os.path.join(self.directory, f)
                self._run_cmd(["gunzip", path])
                # Fix: strip the literal ".gz" suffix.  rstrip(".gz")
                # strips any trailing run of the characters '.', 'g', 'z'
                # -- it only worked here by luck.
                sql_files.append(f[:-len(".gz")])
            elif f.endswith('.sql'):
                sql_files.append(f)

    # we want to go in the order the tables were dumped (NNN_ prefix)
    sql_files.sort()
    r = re.compile(r'\d{3,}_([^\.]+)')
    for f in sql_files:
        path = os.path.join(self.directory, f)
        m = r.match(f)
        if m:
            table = m.group(1)
            logger.info('------- restoring table {}'.format(table))
            #psql_args = self._get_args('psql', '-X', '--echo-queries', '-f {}'.format(path))
            psql_args = self._get_args('psql', '-X', '--quiet', '--file={}'.format(path))
            self._run_cmd(psql_args)
            logger.info('------- restored table {}'.format(table))

    return True
use the self.directory to restore a db NOTE -- this will only restore a database dumped with one of the methods of this class
def table_dump(self, table):
    """Dump all rows of *table* via pg_dump, gzipping the output into the
    next numbered outfile.  Returns True on completion."""
    if not table:
        raise ValueError("no table")
    cmds = []
    logger.info('------- dumping table {}'.format(table))
    cmd = self._get_args(
        "pg_dump",
        "--table={}".format(table),
        #"--data-only",
        "--clean",
        "--no-owner",
        "--column-inserts",
    )
    cmds.append((cmd, {}))
    outfile_path = self._get_outfile_path(table)
    # second stage of the pipeline: gzip into the outfile (needs a shell
    # for the output redirection)
    cmds.append(('gzip > "{}"'.format(outfile_path), {"shell": True}))
    #cmd += ' | {}'.format(' | '.join(pipes))
    #cmd += ' > {}'.format(outfile_path)
    self._run_cmds(cmds)
    logger.info('------- dumped table {}'.format(table))
    return True
dump all the rows of the given table name
def _get_env(self):
    """Return the environment dict used to run the postgres commands.

    Creates a temporary pgpass file so the commands can run without a
    password prompt; the computed environment is cached on self.env.
    """
    if hasattr(self, 'env'):
        return self.env

    # create a temporary pgpass file
    pgpass = self._get_file()

    # format: http://www.postgresql.org/docs/9.2/static/libpq-pgpass.html
    pgpass.write('*:*:*:{}:{}\n'.format(self.username, self.password).encode("utf-8"))
    pgpass.close()

    self.env = dict(os.environ)
    self.env['PGPASSFILE'] = pgpass.name

    # we want to assure a consistent environment
    if 'PGOPTIONS' in self.env:
        del self.env['PGOPTIONS']

    return self.env
this returns an environment dictionary we want to use to run the command this will also create a fake pgpass file in order to make it possible for the script to be passwordless
def _get_response(**kwargs): if 'code' not in kwargs: kwargs['code'] = 200 if 'headers' not in kwargs: kwargs['headers'] = dict() if 'version' not in kwargs: kwargs['version'] = 'HTTP/1.1' return dict(**kwargs)
Get a template response Use kwargs to add things to the dictionary
def _write_transport(self, string): if isinstance(string, str): # we need to convert to bytes self.transport.write(string.encode('utf-8')) else: self.transport.write(string)
Convenience function to write to the transport
def _write_response(self, response):
    """Write the response back to the client.

    Sends the status line, the headers (adding Content-Length from the
    body when absent, and a Date header), then the body if present.

    response -- dict -- as produced by _get_response
    """
    # NOTE(review): `responses` looks like the stdlib code->phrase map
    # (http.server/http.client) -- confirm the import
    status = '{} {} {}\r\n'.format(response['version'],
                                   response['code'],
                                   responses[response['code']])
    self.logger.debug("Responding status: '%s'", status.strip())
    self._write_transport(status)

    # fill in Content-Length from the body unless the caller already set it
    if 'body' in response and 'Content-Length' not in response['headers']:
        response['headers']['Content-Length'] = len(response['body'])

    response['headers']['Date'] = datetime.utcnow().strftime(
        "%a, %d %b %Y %H:%M:%S +0000")

    for (header, content) in response['headers'].items():
        self.logger.debug("Sending header: '%s: %s'", header, content)
        self._write_transport('{}: {}\r\n'.format(header, content))

    # blank line separates the headers from the body
    self._write_transport('\r\n')
    if 'body' in response:
        self._write_transport(response['body'])
Write the response back to the client Arguments: response -- the dictionary containing the response.
def connection_made(self, transport):
    """Called when the connection is made.

    Stores the transport, enables keepalive, and arms the inactivity
    timeout when one is configured.
    """
    self.logger.info('Connection made at object %s', id(self))
    self.transport = transport
    self.keepalive = True
    if self._timeout:
        self.logger.debug('Registering timeout event')
        # Fix: was stored on the misspelled attribute `_timout_handle`,
        # so data_received could never cancel or delay this timeout.
        self._timeout_handle = self._loop.call_later(
            self._timeout, self._handle_timeout)
Called when the connection is made
def connection_lost(self, exception):
    """Called when the connection is lost or closed.

    The argument is either an exception object or None.  The latter
    means a regular EOF was received, or the connection was aborted or
    closed by this side of the connection.
    """
    if exception is None:
        self.logger.info('Connection lost')
    else:
        self.logger.exception('Connection lost!')
Called when the connection is lost or closed. The argument is either an exception object or None. The latter means a regular EOF is received, or the connection was aborted or closed by this side of the connection.
def data_received(self, data):
    """Process received data from the socket.

    Parses and handles the request, answering InvalidRequestError with
    its HTTP response; then either closes the connection (no keepalive)
    or re-arms the inactivity timeout.
    """
    self.logger.debug('Received data: %s', repr(data))
    try:
        request = self._parse_headers(data)
        self._handle_request(request)
    except InvalidRequestError as e:
        self._write_response(e.get_http_response())

    if not self.keepalive:
        if self._timeout_handle:
            self._timeout_handle.cancel()
        self.transport.close()

    if self._timeout and self._timeout_handle:
        self.logger.debug('Delaying timeout event')
        self._timeout_handle.cancel()
        # Fix: the rescheduled handle was assigned to the misspelled
        # `_timout_handle`, leaking it and leaving this attribute stale.
        self._timeout_handle = self._loop.call_later(
            self._timeout, self._handle_timeout)
Process received data from the socket Called when we receive data
def _get_request_uri(self, request): request_uri = request['target'] if request_uri.startswith('/'): # eg. GET /index.html return (request.get('Host', 'localhost').split(':')[0], request_uri[1:]) elif '://' in request_uri: # eg. GET http://rded.nl locator = request_uri.split('://', 1)[1] host, path = locator.split('/', 1) return (host.split(':')[0], path)
Parse the request URI into something useful Server MUST accept full URIs (5.1.2)
def get_http_response(self):
    """Return this exception as an HTTP response dict suitable for output."""
    headers = {'Content-Type': 'text/plain'}
    return _get_response(code=self.code, body=str(self), headers=headers)
Get this exception as an HTTP response suitable for output
def _start_server(bindaddr, port, hostname, folder):
    """Start an asyncio HTTP server on *bindaddr*:*port*, serving *folder*
    for virtual host *hostname*; blocks until KeyboardInterrupt."""
    import asyncio
    from .httpserver import HttpProtocol
    loop = asyncio.get_event_loop()
    # one HttpProtocol instance is created per incoming connection
    coroutine = loop.create_server(lambda: HttpProtocol(hostname, folder),
                                   bindaddr, port)
    server = loop.run_until_complete(coroutine)
    print('Starting server on {}'.format(server.sockets[0].getsockname()))
    try:
        loop.run_forever()
    except KeyboardInterrupt:
        pass
Starts an asyncio server
def run(argv=None):  # pragma: no cover
    """Run the HTTP server

    Usage: httpserver [options] [<folder>]

    Options::

        -h,--host=<hostname>        What host name to serve (default localhost)
        -a,--bindaddress=<address>  Address to bind to (default 127.0.0.1)
        -p,--port=<port>            Port to listen on (default 8080)
        -v,--verbose                Increase verbosity to INFO messages
        -d,--debug                  Increase verbosity to DEBUG messages
        --help                      Print this help message
        --version                   Print the version

    To serve /path/to/www on all (ipv4) addresses for host myserver on
    port 80::

        httpserver -a 0.0.0.0 -p 80 -h myserver /path/to/www
    """
    # NOTE: this docstring doubles as the docopt usage spec parsed below
    import sys
    import os
    import docopt
    import textwrap

    # Check for the version
    if not sys.version_info >= (3, 4):
        print('This python version is not supported. Please use python 3.4')
        exit(1)

    argv = argv or sys.argv[1:]

    # remove some RST formatting before handing the text to docopt
    docblock = run.__doc__.replace('::', ':')
    args = docopt.docopt(textwrap.dedent(docblock), argv)
    if args['--version']:
        print("httpserver version {} by {}".format(
            __version__, __author__))
        exit(0)

    # Set up logging
    level = logging.WARNING
    if args['--verbose']:
        level = logging.INFO
    if args['--debug']:
        level = logging.DEBUG
    logging.basicConfig(level=level)

    logger = logging.getLogger('run method')
    logger.debug('CLI args: %s' % args)

    bindaddr = args['--bindaddress'] or '127.0.0.1'
    port = args['--port'] or '8080'
    folder = args['<folder>'] or os.getcwd()
    hostname = args['--host'] or 'localhost'

    _start_server(bindaddr, port, hostname, folder)
Run the HTTP server Usage: httpserver [options] [<folder>] Options:: -h,--host=<hostname> What host name to serve (default localhost) -a,--bindaddress=<address> Address to bind to (default 127.0.0.1) -p,--port=<port> Port to listen on (default 8080) -v,--verbose Increase verbosity to INFO messages -d,--debug Increase verbosity to DEBUG messages --help Print this help message --version Print the version To serve /path/to/www on all (ipv4) addresses for host myserver on port 80:: httpserver -a 0.0.0.0 -p 80 -h myserver /path/to/www
def reading(self):
    """Open self.link over HTTP(S) and return its contents.

    Honours http_proxy/https_proxy from the environment.  On failure an
    error message is printed and a single space is returned.
    """
    try:
        # testing proxy: pick up proxy settings from the environment
        proxies = {}
        try:
            proxies["http_proxy"] = os.environ['http_proxy']
        except KeyError:
            pass
        try:
            proxies["https_proxy"] = os.environ['https_proxy']
        except KeyError:
            pass
        if len(proxies) != 0:
            proxy = urllib2.ProxyHandler(proxies)
            opener = urllib2.build_opener(proxy)
            urllib2.install_opener(opener)
        # end testing
        f = urllib2.urlopen(self.link)
        return f.read()
    except (urllib2.URLError, ValueError):
        print("\n{0}Can't read the file '{1}'{2}".format(
            self.meta.color["RED"], self.link.split("/")[-1],
            self.meta.color["ENDC"]))
        # a single space keeps downstream string handling from seeing
        # None -- presumably relied on by callers; TODO confirm
        return " "
Open url and read
def sbo(self, name):
    """Recursively resolve all SlackBuild dependencies of *name*.

    Each package's dependency list is appended to self.dep_results and
    the accumulated structure is returned.  '%README%' entries and
    blacklisted packages are skipped.  Returns [] when dependency
    resolution is disabled (rsl_deps off or --resolve-off given).
    """
    if (self.meta.rsl_deps in ["on", "ON"] and
            "--resolve-off" not in self.flag):
        # deep dependency chains can exceed the default recursion limit
        sys.setrecursionlimit(10000)
        dependencies = []
        requires = SBoGrep(name).requires()
        if requires:
            for req in requires:
                status(0.03)
                # toolbar_width = status(index, toolbar_width, 1)
                # avoid to add %README% as dependency and
                # if require in blacklist
                if "%README%" not in req and req not in self.blacklist:
                    dependencies.append(req)
            if dependencies:
                self.dep_results.append(dependencies)
                for dep in dependencies:
                    self.sbo(dep)
        return self.dep_results
    else:
        return []
Build all dependencies of a package
def find_package(find_pkg, directory):
    """Return the installed package file names in *directory* whose names
    start with *find_pkg*, skipping hidden entries and blacklisted
    packages."""
    matches = []
    if os.path.isdir(directory):
        installed = sorted(os.listdir(directory))
        blacklist = BlackList().packages(pkgs=installed, repo="local")
        for pkg in installed:
            if pkg.startswith("."):
                continue
            if not pkg.startswith(find_pkg):
                continue
            if split_package(pkg)[0] not in blacklist:
                matches.append(pkg)
    return matches
Find packages
def add_sec_label(cell: NotebookNode, nbname) -> Sequence[NotebookNode]:
    """Add a Latex \\label{} under the chapter heading.

    Takes the first cell of a notebook, which must be a Markdown cell
    starting with a level 1 heading ('# ...' or '==='-underlined), and
    inserts a ``\\label{sec:<nbname>}`` latex cell just underneath the
    heading.  Raises NoHeader when no level 1 heading is found.
    """
    assert cell.cell_type == 'markdown', cell.cell_type
    lines = cell.source.splitlines()
    if not lines:
        # an empty first cell has no heading either -- report it the
        # same way instead of crashing with IndexError
        raise NoHeader
    if lines[0].startswith('# '):
        header_lines = 1
    elif len(lines) > 1 and lines[1].startswith('==='):
        header_lines = 2
    else:
        raise NoHeader

    header = '\n'.join(lines[:header_lines])
    intro_remainder = '\n'.join(lines[header_lines:]).strip()
    res = [
        new_markdown_cell(header),
        # raw string: '\l' is an invalid escape sequence otherwise
        new_latex_cell(r'\label{sec:%s}' % nbname),
    ]
    res[0].metadata = cell.metadata
    if intro_remainder:
        res.append(new_markdown_cell(intro_remainder))
    return res
Adds a Latex \\label{} under the chapter heading. This takes the first cell of a notebook, and expects it to be a Markdown cell starting with a level 1 heading. It inserts a label with the notebook name just underneath this heading.
def repos(self):
    """Print a table of all repositories (id, URL, default yes/no,
    enabled/disabled status) followed by a summary, then exit."""
    # def_cnt counts enabled repositories, cus_cnt counts custom ones
    # NOTE(review): despite its name, def_cnt tracks "enabled", not
    # "default" -- confirm intended
    def_cnt, cus_cnt = 0, 0
    print("")
    self.msg.template(78)
    print("{0}{1}{2}{3}{4}{5}{6}".format(
        "| Repo id", " " * 2,
        "Repo URL", " " * 44,
        "Default", " " * 3,
        "Status"))
    self.msg.template(78)
    for repo_id, repo_URL in sorted(self.all_repos.iteritems()):
        status, COLOR = "disabled", self.meta.color["RED"]
        default = "yes"
        # truncate long URLs so the column layout survives
        if len(repo_URL) > 49:
            repo_URL = repo_URL[:48] + "~"
        if repo_id in self.meta.repositories:
            def_cnt += 1
            status, COLOR = "enabled", self.meta.color["GREEN"]
        if repo_id not in self.meta.default_repositories:
            cus_cnt += 1
            default = "no"
        print(" {0}{1}{2}{3}{4}{5}{6}{7:>8}{8}".format(
            repo_id, " " * (9 - len(repo_id)),
            repo_URL, " " * (52 - len(repo_URL)),
            default, " " * (8 - len(default)),
            COLOR, status, self.meta.color["ENDC"]))
    print("\nRepositories summary")
    print("=" * 79)
    print("{0}{1}/{2} enabled default repositories and {3} custom.".format(
        self.meta.color["GREY"], def_cnt, len(self.all_repos), cus_cnt))
    print("Edit the file '/etc/slpkg/repositories.conf' for enable "
          "and disable default\nrepositories or run 'slpkg "
          "repo-enable' command.\n{0}".format(self.meta.color["ENDC"]))
    raise SystemExit()
View enabled and disabled repositories
def view(self): print("") # new line at start conf_args = [ "RELEASE", "SLACKWARE_VERSION", "COMP_ARCH", "BUILD_PATH", "PACKAGES", "PATCHES", "CHECKMD5", "DEL_ALL", "DEL_BUILD", "SBO_BUILD_LOG", "MAKEFLAGS", "DEFAULT_ANSWER", "REMOVE_DEPS_ANSWER", "SKIP_UNST", "RSL_DEPS", "DEL_DEPS", "USE_COLORS", "DOWNDER", "DOWNDER_OPTIONS", "SLACKPKG_LOG", "ONLY_INSTALLED", "PRG_BAR", "EDITOR", "NOT_DOWNGRADE" ] read_conf = Utils().read_file(self.config_file) for line in read_conf.splitlines(): if not line.startswith("#") and line.split("=")[0] in conf_args: print("{0}".format(line)) else: print("{0}{1}{2}".format(self.meta.color["CYAN"], line, self.meta.color["ENDC"])) print("")
View slpkg config file
def edit(self):
    """Open the slpkg configuration file in the configured editor."""
    command = "{0} {1}".format(self.meta.editor, self.config_file)
    subprocess.call(command, shell=True)
Edit configuration file
def reset(self):
    """Reset slpkg.conf from the pristine ".orig" copy and report whether
    the two files now match."""
    pristine = self.config_file + ".orig"
    shutil.copy2(pristine, self.config_file)
    if filecmp.cmp(pristine, self.config_file):
        print("{0}The reset was done{1}".format(
            self.meta.color["GREEN"], self.meta.color["ENDC"]))
    else:
        print("{0}Reset failed{1}".format(self.meta.color["RED"],
                                          self.meta.color["ENDC"]))
Reset slpkg.conf file with default values
def get(self):
    """Return the SBo arch string: i?86 machines map to self.x86, arm*
    machines to self.arm, anything else is returned unchanged."""
    if self.arch.startswith("i") and self.arch.endswith("86"):
        self.arch = self.x86
    # NOTE(review): this branch tests self.meta.arch while the branch
    # above tests self.arch -- confirm which attribute is authoritative
    elif self.meta.arch.startswith("arm"):
        self.arch = self.arm
    return self.arch
Return sbo arch
def results(self):
    """Print the file-check summary table: total files, files not
    installed, and a colour-coded health percentage."""
    print("")
    per = int(round((float(self.cf) / (self.cf + self.cn)) * 100))
    # Fix: the original used strict comparisons (> 90, < 90 and > 60,
    # < 60), leaving `color` unbound (NameError) when the percentage
    # landed exactly on 90 or 60.
    if per >= 90:
        color = self.meta.color["GREEN"]
    elif per >= 60:
        color = self.meta.color["YELLOW"]
    else:
        color = self.meta.color["RED"]
    health = "{0}{1}%{2}".format(color, str(per), self.meta.color["ENDC"])
    self.msg.template(78)
    print("| {0}{1}{2}{3}{4}".format(
        "Total files", " " * 7, "Not installed", " " * 40, "Health"))
    self.msg.template(78)
    print("| {0}{1}{2}{3}{4:>4}".format(
        self.cf, " " * (18 - len(str(self.cf))), self.cn,
        " " * (55 - len(str(self.cn))), health))
    self.msg.template(78)
Print results
def case_sensitive(self, lst):
    """Map each name in *lst* from its lower-case form to the original
    (case-preserving) spelling."""
    return {pkg.lower(): pkg for pkg in lst}
Create a dictionary from a list, mapping each name in lower case to its original spelling
def remove_dbs(self, double):
    """Return *double* with duplicate items removed, keeping first-seen
    order.

    Uses a seen-set for O(n) total work instead of the original's
    O(n^2) list-membership test.
    """
    seen = set()
    unique = []
    for item in double:
        if item not in seen:
            seen.add(item)
            unique.append(item)
    return unique
Remove duplicate items from a list
def read_file(self, registry):
    """Return the full contents of the file at *registry*.

    The with-block closes the file; the original also called close()
    explicitly inside the block, which was redundant.
    """
    with open(registry, "r") as file_txt:
        return file_txt.read()
Return the contents of the given file
def package_name(self, PACKAGES_TXT):
    """Return the names of all packages listed in a repository
    PACKAGES.TXT blob (lines starting with 'PACKAGE NAME:')."""
    names = []
    for line in PACKAGES_TXT.splitlines():
        if not line.startswith("PACKAGE NAME:"):
            continue
        # the file name starts after the 14-char prefix
        names.append(split_package(line[14:].strip())[0])
    return names
Returns list with all the names of packages repository
def check_downloaded(self, path, maybe_downloaded):
    """Return the subset of *maybe_downloaded* that actually exists as a
    file under *path* (concatenated directly, so *path* should end with
    a separator)."""
    return [pkg for pkg in maybe_downloaded
            if os.path.isfile(path + pkg)]
Check if files downloaded and return downloaded packages
def write_deps(deps_dict):
    """Write each installed package's dependencies to a log file under
    /var/log/slpkg/dep/<package>, one dependency per line.

    deps_dict -- dict -- package name -> list of dependency names
    """
    for name, dependencies in deps_dict.iteritems():
        # only log packages that are actually installed
        if find_package(name + _meta_.sp, _meta_.pkg_path):
            dep_path = _meta_.log_path + "dep/"
            if not os.path.exists(dep_path):
                os.mkdir(dep_path)
            # drop any stale log before writing the fresh one
            if os.path.isfile(dep_path + name):
                os.remove(dep_path + name)
            if len(dependencies) >= 1:
                with open(dep_path + name, "w") as f:
                    for dep in dependencies:
                        f.write(dep + "\n")
                    f.close()  # NOTE(review): redundant inside `with`
Write dependencies in a log file into directory `/var/log/slpkg/dep/`
def dependencies(self, deps_dict):
    """Generate a graph image file with the dependencies map tree.

    deps_dict -- dict -- adjacency mapping handed to pygraphviz

    Requires pygraphviz (and graph-easy for ascii output); always exits
    via SystemExit when done or on error.
    """
    try:
        import pygraphviz as pgv
    except ImportError:
        # build an install hint, adding graph-easy when ascii output was
        # requested and the binary is missing
        graph_easy, comma = "", ""
        if (self.image == "ascii" and
                not os.path.isfile("/usr/bin/graph-easy")):
            comma = ","
            graph_easy = " graph-easy"
        print("Require 'pygraphviz{0}{1}': Install with 'slpkg -s sbo "
              "pygraphviz{1}'".format(comma, graph_easy))
        raise SystemExit()
    if self.image != "ascii":
        self.check_file()
    try:
        G = pgv.AGraph(deps_dict)
        G.layout(prog="fdp")
        if self.image == "ascii":
            # write a .dot file and render it as ascii via graph-easy
            G.write("{0}.dot".format(self.image))
            self.graph_easy()
        G.draw(self.image)
    except IOError:
        raise SystemExit()
    if os.path.isfile(self.image):
        print("Graph image file '{0}' created".format(self.image))
    raise SystemExit()
Generate a graph image file with the dependencies map tree
def check_file(self):
    """Validate the requested image file suffix against the supported
    formats; exit with a message when missing or unrecognized."""
    try:
        # NOTE(review): split(".")[1] takes only the first suffix, so a
        # name like "graph.tar.gz" is checked as ".tar" -- confirm
        image_type = ".{0}".format(self.image.split(".")[1])
        if image_type not in self.file_format:
            print("Format: '{0}' not recognized. Use one of "
                  "them:\n{1}".format(self.image.split(".")[1],
                                      ", ".join(self.file_format)))
            raise SystemExit()
    except IndexError:
        # no "." suffix at all in the file name
        print("slpkg: Error: Image file suffix missing")
        raise SystemExit()
Check for file format and type
def graph_easy(self):
    """Draw an ascii diagram from the generated .dot file.

    Requires the graph-easy perl module; the intermediate .dot file is
    removed and SystemExit is always raised when done.
    """
    if not os.path.isfile("/usr/bin/graph-easy"):
        print("Require 'graph-easy': Install with 'slpkg -s sbo "
              "graph-easy'")
        self.remove_dot()
        raise SystemExit()
    subprocess.call("graph-easy {0}.dot".format(self.image), shell=True)
    self.remove_dot()
    raise SystemExit()
Draw an ascii diagram; requires the graph-easy perl module
def remove_dot(self):
    """Delete the intermediate "<image>.dot" file if it exists."""
    dot_file = "{0}.dot".format(self.image)
    if os.path.isfile(dot_file):
        os.remove(dot_file)
Remove .dot files
def pkg_security(pkgs):
    """Check packages before install or upgrade for security reasons.

    Reads the watch list from /etc/slpkg/pkg_security (one package name
    per line, '#' comments ignored); for each requested package on the
    list, warns and aborts unless the user confirms.
    """
    security_packages = Utils().read_file("/etc/slpkg/pkg_security")
    # collect non-comment entries from the configuration file
    packages = []
    for line in security_packages.splitlines():
        line = line.lstrip()
        if not line.startswith("#"):
            packages.append(line.replace("\n", ""))
    for p in pkgs:
        # membership test replaces the original O(n*m) nested loop (and
        # prompts once per package even if the file lists it twice)
        if p in packages:
            Msg().security_pkg(p)
            if not Msg().answer() in ["y", "Y"]:
                raise SystemExit()
Check packages before install or upgrade for security reasons. Configuration file in the /etc/slpkg/pkg_security
def alien_filter(packages, sizes):
    """Filter out duplicate package names from the alien repository,
    keeping the first occurrence of each name together with its size.

    return -- tuple -- (filtered packages, filtered sizes)
    """
    seen = []
    flt_packages, flt_sizes = [], []
    for package, size in zip(packages, sizes):
        name = split_package(package)[0]
        if name not in seen:
            seen.append(name)
            flt_packages.append(package)
            flt_sizes.append(size)
    return flt_packages, flt_sizes
This filter avoid list double packages from alien repository
def upgrade(self, flag):
    """Upgrade the Slackware binary packages in self.binary with new
    versions via upgradepkg, verifying each package shows up as
    installed afterwards (otherwise exit)."""
    for pkg in self.binary:
        try:
            subprocess.call("upgradepkg {0} {1}".format(flag, pkg),
                            shell=True)
            # strip the ".t?z" extension and leading path to get the
            # installed-package name to verify
            check = pkg[:-4].split("/")[-1]
            if os.path.isfile(self.meta.pkg_path + check):
                print("Completed!\n")
            else:
                raise SystemExit()
        except subprocess.CalledProcessError:
            # NOTE(review): subprocess.call never raises
            # CalledProcessError (check_call does), so this handler
            # looks unreachable -- confirm intent
            self._not_found("Can't upgrade", self.binary, pkg)
            raise SystemExit(1)
Upgrade Slackware binary packages with new
def remove(self, flag, extra):
    """Remove Slackware binary packages (self.binary), optionally
    together with their logged dependencies.

    flag -- string -- flag passed through to removepkg
    extra -- list -- extra slpkg options (e.g. --deps, --checklist)
    """
    self.flag = flag
    self.extra = extra
    self.dep_path = self.meta.log_path + "dep/"
    dependencies, rmv_list = [], []
    self.removed = self._view_removed()
    if not self.removed:
        print("")   # new line at end
    else:
        msg = "package"
        if len(self.removed) > 1:
            msg = msg + "s"
        try:
            # a configured default answer skips the interactive prompt
            if self.meta.default_answer in ["y", "Y"]:
                remove_pkg = self.meta.default_answer
            else:
                remove_pkg = raw_input(
                    "\nAre you sure to remove {0} {1} [y/N]? ".format(
                        str(len(self.removed)), msg))
        except EOFError:
            print("")   # new line at exit
            raise SystemExit()
        if remove_pkg in ["y", "Y"]:
            self._check_if_used(self.binary)
            for rmv in self.removed:
                # If package build and install with "slpkg -s sbo <package>"
                # then look log file for dependencies in /var/log/slpkg/dep,
                # read and remove all else remove only the package.
                if (os.path.isfile(self.dep_path + rmv) and
                        self.meta.del_deps in ["on", "ON"] or
                        os.path.isfile(self.dep_path + rmv) and
                        "--deps" in self.extra):
                    dependencies = self._view_deps(self.dep_path, rmv)
                    if dependencies and self._rmv_deps_answer() in ["y", "Y"]:
                        rmv_list += self._rmv_deps(dependencies, rmv)
                    else:
                        rmv_list += self._rmv_pkg(rmv)
                else:
                    rmv_list += self._rmv_pkg(rmv)
            # Prints all removed packages
            self._reference_rmvs(rmv_list)
Remove Slackware binary packages
def _rmv_deps_answer(self):
    """Ask whether dependencies should be removed too.

    A 'y'/'Y' configured default short-circuits the interactive prompt.
    """
    if self.meta.remove_deps_answer in ["y", "Y"]:
        return self.meta.remove_deps_answer
    try:
        answer = raw_input(
            "\nRemove dependencies (maybe used by "
            "other packages) [y/N]? ")
        print("")
    except EOFError:
        print("")   # new line at exit
        raise SystemExit()
    return answer
Remove dependencies answer
def _get_removed(self):
    """Collect the packages to remove according to the extra options.

    With --tag, match installed packages by tag suffix; otherwise match
    each requested name exactly against the installed packages.  Exits
    with an error when nothing matches.

    return -- tuple -- (removed package names, matching package files)
    """
    removed, packages = [], []
    if "--tag" in self.extra:
        # match every installed package whose file name ends with one of
        # the requested tags
        for pkg in find_package("", self.meta.pkg_path):
            for tag in self.binary:
                if pkg.endswith(tag):
                    removed.append(split_package(pkg)[0])
                    packages.append(pkg)
        if not removed:
            self.msg.pkg_not_found("", "'tag'", "Can't remove", "\n")
            raise SystemExit(1)
    else:
        for pkg in self.binary:
            name = GetFromInstalled(pkg).name()
            ver = GetFromInstalled(pkg).version()
            package = find_package("{0}{1}{2}".format(
                name, ver, self.meta.sp), self.meta.pkg_path)
            if pkg and name == pkg:
                removed.append(pkg)
                packages.append(package[0])
            else:
                self.msg.pkg_not_found("", pkg, "Can't remove", "\n")
                raise SystemExit(1)
    return removed, packages
Manage removed packages by extra options
def _view_removed(self):
    """Show the packages about to be removed (or an interactive
    checklist with --checklist) and return the chosen names."""
    print("\nPackages with name matching [ {0}{1}{2} ]\n".format(
        self.meta.color["CYAN"], ", ".join(self.binary),
        self.meta.color["ENDC"]))
    removed, packages = self._get_removed()
    if packages and "--checklist" in self.extra:
        # interactive selection: start from an empty list and add only
        # the packages the user leaves checked
        removed = []
        text = "Press 'spacebar' to unchoose packages from the remove"
        backtitle = "{0} {1}".format(self.meta.__all__,
                                     self.meta.__version__)
        status = True
        pkgs = DialogUtil(packages, text, " Remove ", backtitle,
                          status).checklist()
        if pkgs:
            for rmv in pkgs:
                removed.append(split_package(rmv)[0])
            # the dialog itself already served as confirmation
            self.meta.default_answer = "y"
    else:
        for rmv, pkg in zip(removed, packages):
            print("[ {0}delete{1} ] --> {2}".format(
                self.meta.color["RED"], self.meta.color["ENDC"], pkg))
            self._sizes(pkg)
        self._calc_sizes()
        self._remove_summary()
    return removed
View packages before removed
def _calc_sizes(self): if self.size > 1024: self.unit = "Mb" self.size = (self.size / 1024) if self.size > 1024: self.unit = "Gb" self.size = (self.size / 1024)
Package size calculation
def _remove_summary(self): if self.size > 0: print("\nRemoved summary") print("=" * 79) print("{0}Size of removed packages {1} {2}.{3}".format( self.meta.color["GREY"], round(self.size, 2), self.unit, self.meta.color["ENDC"]))
Removed package size summary
def _view_deps(self, path, package):
    """View dependencies before remove and return those selected for
    removal.

    path -- string -- directory holding the dependency log files
    package -- string -- package whose logged dependencies to show
    """
    self.size = 0
    packages = []
    dependencies = (Utils().read_file(path + package)).splitlines()
    # Fix: keep only dependencies that are still installed.  The
    # original called dependencies.remove() while iterating the same
    # list, which silently skipped the entry after every removed one.
    installed = []
    for dep in dependencies:
        if GetFromInstalled(dep).name():
            ver = GetFromInstalled(dep).version()
            packages.append(dep + ver)
            installed.append(dep)
    dependencies = installed
    if packages:
        if "--checklist" in self.extra:
            deps, dependencies = [], []
            text = "Found dependencies for the package {0}".format(package)
            backtitle = "{0} {1}".format(self.meta.__all__,
                                         self.meta.__version__)
            status = True
            deps = DialogUtil(packages, text, " Remove ", backtitle,
                              status).checklist()
            for d in deps:
                # strip the version suffix back off the displayed name
                dependencies.append("-".join(d.split("-")[:-1]))
            self.meta.remove_deps_answer = "y"
        else:
            print("")   # new line at start
            self.msg.template(78)
            print("| Found dependencies for the package {0}:".format(
                package))
            self.msg.template(78)
            for pkg in packages:
                find = find_package(pkg + self.meta.sp, self.meta.pkg_path)
                self._sizes(find[0])
                print("| {0}{1}{2}".format(self.meta.color["RED"], pkg,
                                           self.meta.color["ENDC"]))
            self.msg.template(78)
            self._calc_sizes()
            print("| {0}Size of removed dependencies {1} {2}{3}".format(
                self.meta.color["GREY"], round(self.size, 2), self.unit,
                self.meta.color["ENDC"]))
            self.msg.template(78)
    return dependencies
View dependencies before remove
def _removepkg(self, package):
    """Run the Slackware removepkg command on *package* and delete its
    dependency log file if present."""
    try:
        subprocess.call("removepkg {0} {1}".format(self.flag, package),
                        shell=True)
        if os.path.isfile(self.dep_path + package):
            os.remove(self.dep_path + package)  # remove log
    except subprocess.CalledProcessError as er:
        # NOTE(review): subprocess.call does not raise
        # CalledProcessError (check_call does); this handler appears
        # unreachable -- confirm intent
        print(er)
        raise SystemExit()
removepkg Slackware command