text_prompt: string (length 100 to 17.7k characters)
code_prompt: string (length 7 to 9.86k characters)
<SYSTEM_TASK:> Forms the user-label matrix to be used in multi-label classification. <END_TASK> <USER_TASK:> Description: def form_user_label_matrix(user_twitter_list_keywords_gen, id_to_node, max_number_of_labels): """ Forms the user-label matrix to be used in multi-label classification. Input: - user_twitter_list_keywords_gen: - id_to_node: A Twitter id to node map as a python dictionary. Outputs: - user_label_matrix: A user-to-label matrix in scipy sparse matrix format. - annotated_nodes: A numpy array containing graph nodes. - label_to_lemma: A python dictionary that maps a numerical label to a string topic lemma. - lemma_to_keyword: A python dictionary that maps a lemma to the original keyword. """
    user_label_matrix, annotated_nodes, label_to_lemma, node_to_lemma_tokeywordbag = form_user_term_matrix(user_twitter_list_keywords_gen,
                                                                                                            id_to_node,
                                                                                                            None)
    # write_terms_and_frequencies("/home/georgerizos/Documents/term_matrix.txt", user_label_matrix, label_to_lemma)

    user_label_matrix, annotated_nodes, label_to_lemma = filter_user_term_matrix(user_label_matrix,
                                                                                 annotated_nodes,
                                                                                 label_to_lemma,
                                                                                 max_number_of_labels)
    # write_terms_and_frequencies("/home/georgerizos/Documents/label_matrix.txt", user_label_matrix, label_to_lemma)

    lemma_to_keyword = form_lemma_tokeyword_map(annotated_nodes, node_to_lemma_tokeywordbag)

    return user_label_matrix, annotated_nodes, label_to_lemma, lemma_to_keyword
<SYSTEM_TASK:> Forms a user-term matrix. <END_TASK> <USER_TASK:> Description: def form_user_term_matrix(user_twitter_list_keywords_gen, id_to_node, lemma_set=None, keyword_to_topic_manual=None): """ Forms a user-term matrix. Input: - user_twitter_list_keywords_gen: A python generator that yields a user Twitter id and a bag-of-words. - id_to_node: A Twitter id to node map as a python dictionary. - lemma_set: For the labelling, we use only lemmas in this set. Default: None Outputs: - user_term_matrix: A user-to-term matrix in scipy sparse matrix format. - annotated_nodes: A numpy array containing graph nodes. - label_to_topic: A python dictionary that maps a numerical label to a string topic/keyword. - node_to_lemma_tokeywordbag: A python dictionary that maps nodes to lemma-to-keyword bags. """
# Prepare for iteration. term_to_attribute = dict() user_term_matrix_row = list() user_term_matrix_col = list() user_term_matrix_data = list() append_user_term_matrix_row = user_term_matrix_row.append append_user_term_matrix_col = user_term_matrix_col.append append_user_term_matrix_data = user_term_matrix_data.append annotated_nodes = list() append_node = annotated_nodes.append node_to_lemma_tokeywordbag = dict() invalid_terms = list() counter = 0 if keyword_to_topic_manual is not None: manual_keyword_list = list(keyword_to_topic_manual.keys()) for user_twitter_id, user_annotation in user_twitter_list_keywords_gen: counter += 1 # print(counter) bag_of_words = user_annotation["bag_of_lemmas"] lemma_to_keywordbag = user_annotation["lemma_to_keywordbag"] if lemma_set is not None: bag_of_words = {lemma: multiplicity for lemma, multiplicity in bag_of_words.items() if lemma in lemma_set} lemma_to_keywordbag = {lemma: keywordbag for lemma, keywordbag in lemma_to_keywordbag.items() if lemma in lemma_set} node = id_to_node[user_twitter_id] append_node(node) node_to_lemma_tokeywordbag[node] = lemma_to_keywordbag for term, multiplicity in bag_of_words.items(): if term == "news": continue if keyword_to_topic_manual is not None: keyword_bag = lemma_to_keywordbag[term] term = max(keyword_bag.keys(), key=(lambda key: keyword_bag[key])) found_list_of_words = simple_word_query(term, manual_keyword_list, edit_distance=1) if len(found_list_of_words) > 0: term = found_list_of_words[0] try: term = keyword_to_topic_manual[term] except KeyError: print(term) vocabulary_size = len(term_to_attribute) attribute = term_to_attribute.setdefault(term, vocabulary_size) append_user_term_matrix_row(node) append_user_term_matrix_col(attribute) append_user_term_matrix_data(multiplicity) annotated_nodes = np.array(list(set(annotated_nodes)), dtype=np.int64) user_term_matrix_row = np.array(user_term_matrix_row, dtype=np.int64) user_term_matrix_col = np.array(user_term_matrix_col, dtype=np.int64) user_term_matrix_data = np.array(user_term_matrix_data, dtype=np.float64) user_term_matrix = sparse.coo_matrix((user_term_matrix_data, (user_term_matrix_row, user_term_matrix_col)), shape=(len(id_to_node), len(term_to_attribute))) label_to_topic = dict(zip(term_to_attribute.values(), term_to_attribute.keys())) # print(user_term_matrix.shape) # print(len(label_to_topic)) # print(invalid_terms) return user_term_matrix, annotated_nodes, label_to_topic, node_to_lemma_tokeywordbag
<SYSTEM_TASK:> Collects at most 500 Twitter lists for each user from an input list of Twitter user ids. <END_TASK> <USER_TASK:> Description: def fetch_twitter_lists_for_user_ids_generator(twitter_app_key, twitter_app_secret, user_id_list): """ Collects at most 500 Twitter lists for each user from an input list of Twitter user ids. Inputs: - twitter_app_key: What is says on the tin. - twitter_app_secret: Ditto. - user_id_list: A python list of Twitter user ids. Yields: - user_twitter_id: A Twitter user id. - twitter_lists_list: A python list containing Twitter lists in dictionary (json) format. """
#################################################################################################################### # Log into my application. #################################################################################################################### twitter = login(twitter_app_key, twitter_app_secret) #################################################################################################################### # For each user, gather at most 500 Twitter lists. #################################################################################################################### get_list_memberships_counter = 0 get_list_memberships_time_window_start = time.perf_counter() for user_twitter_id in user_id_list: # Make safe twitter request. try: twitter_lists_list, get_list_memberships_counter, get_list_memberships_time_window_start\ = safe_twitter_request_handler(twitter_api_func=twitter.get_list_memberships, call_rate_limit=15, call_counter=get_list_memberships_counter, time_window_start=get_list_memberships_time_window_start, max_retries=5, wait_period=2, user_id=user_twitter_id, count=500, cursor=-1) # If the call is succesful, yield the list of Twitter lists. yield user_twitter_id, twitter_lists_list except twython.TwythonError: # If the call is unsuccesful, we do not have any Twitter lists to store. yield user_twitter_id, None except URLError: # If the call is unsuccesful, we do not have any Twitter lists to store. yield user_twitter_id, None except BadStatusLine: # If the call is unsuccesful, we do not have any Twitter lists to store. yield user_twitter_id, None
<SYSTEM_TASK:> Sorts a centrality vector and returns the Twitter user ids that are to be annotated. <END_TASK> <USER_TASK:> Description: def decide_which_users_to_annotate(centrality_vector, number_to_annotate, already_annotated, node_to_id): """ Sorts a centrality vector and returns the Twitter user ids that are to be annotated. Inputs: - centrality_vector: A numpy array vector, that contains the centrality values for all users. - number_to_annotate: The number of users to annotate. - already_annotated: A python set of user twitter ids that have already been annotated. - node_to_id: A python dictionary that maps graph nodes to user twitter ids. Output: - user_id_list: A python list of Twitter user ids. """
    # Sort the centrality vector according to decreasing centrality.
    centrality_vector = np.asarray(centrality_vector)
    ind = np.argsort(np.squeeze(centrality_vector))
    if centrality_vector.size > 1:
        reversed_ind = ind[::-1]
    else:
        # With a single element there is nothing to reverse; wrap it in a list.
        # (The original ``reversed_ind = reversed_ind.append(ind)`` bound None.)
        reversed_ind = [int(ind)]

    # Get the sublist of Twitter user ids to return.
    user_id_list = list()
    append_user_id = user_id_list.append

    counter = 0
    for node in reversed_ind:
        user_twitter_id = node_to_id[node]
        if user_twitter_id not in already_annotated:
            append_user_id(user_twitter_id)
            counter += 1
            if counter >= number_to_annotate:
                break

    return user_id_list
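A minimal usage sketch for the selection helper above; the centrality values, node-to-id map and annotated set are illustrative:

    import numpy as np

    centrality = np.array([0.1, 0.9, 0.4])
    node_to_id = {0: 111, 1: 222, 2: 333}
    already_annotated = {333}

    # Picks the two highest-centrality users that are not annotated yet.
    ids = decide_which_users_to_annotate(centrality, 2, already_annotated, node_to_id)
    print(ids)  # [222, 111]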
<SYSTEM_TASK:> A service that leverages twitter lists for on-demand annotation of popular users. <END_TASK> <USER_TASK:> Description: def on_demand_annotation(twitter_app_key, twitter_app_secret, user_twitter_id): """ A service that leverages twitter lists for on-demand annotation of popular users. TODO: Do this. """
    ####################################################################################################################
    # Log into my application
    ####################################################################################################################
    twitter = login(twitter_app_key,
                    twitter_app_secret)

    twitter_lists_list = twitter.get_list_memberships(user_id=user_twitter_id, count=1000)

    for twitter_list in twitter_lists_list:
        print(twitter_list)

    return twitter_lists_list
<SYSTEM_TASK:> Returns the registered member class for the given resource. <END_TASK> <USER_TASK:> Description: def get_member_class(resource): """ Returns the registered member class for the given resource. :param resource: registered resource :type resource: class implementing or instance providing or subclass of a registered resource interface. """
    reg = get_current_registry()
    if IInterface in provided_by(resource):
        member_class = reg.getUtility(resource, name='member-class')
    else:
        member_class = reg.getAdapter(resource, IMemberResource,
                                      name='member-class')
    return member_class
<SYSTEM_TASK:> Returns the registered collection resource class for the given marker <END_TASK> <USER_TASK:> Description: def get_collection_class(resource): """ Returns the registered collection resource class for the given marker interface or member resource class or instance. :param rc: registered resource :type rc: class implementing or instance providing or subclass of a registered resource interface. """
reg = get_current_registry() if IInterface in provided_by(resource): coll_class = reg.getUtility(resource, name='collection-class') else: coll_class = reg.getAdapter(resource, ICollectionResource, name='collection-class') return coll_class
<SYSTEM_TASK:> Adapts an object to a location aware member resource. <END_TASK> <USER_TASK:> Description: def as_member(entity, parent=None): """ Adapts an object to a location aware member resource. :param entity: a domain object for which a resource adapter has been registered :type entity: an object implementing :class:`everest.entities.interfaces.IEntity` :param parent: optional parent collection resource to make the new member a child of :type parent: an object implementing :class:`everest.resources.interfaces.ICollectionResource` :returns: an object implementing :class:`everest.resources.interfaces.IMemberResource` """
    reg = get_current_registry()
    rc = reg.getAdapter(entity, IMemberResource)
    if not parent is None:
        rc.__parent__ = parent  # interface method pylint: disable=E1121
    return rc
<SYSTEM_TASK:> Returns a list of all registered collection resource classes. <END_TASK> <USER_TASK:> Description: def get_registered_collection_resources(): """ Returns a list of all registered collection resource classes. """
reg = get_current_registry() return [util.component for util in reg.registeredUtilities() if util.name == 'collection-class']
<SYSTEM_TASK:> Converts the given resource to a URL. <END_TASK> <USER_TASK:> Description: def resource_to_url(resource, request=None, quote=False): """ Converts the given resource to a URL. :param request: Request object (required for the host name part of the URL). If this is not given, the current request is used. :param bool quote: If set, the URL returned will be quoted. """
if request is None: request = get_current_request() # cnv = request.registry.getAdapter(request, IResourceUrlConverter) reg = get_current_registry() cnv = reg.getAdapter(request, IResourceUrlConverter) return cnv.resource_to_url(resource, quote=quote)
<SYSTEM_TASK:> Converts the given URL to a resource. <END_TASK> <USER_TASK:> Description: def url_to_resource(url, request=None): """ Converts the given URL to a resource. :param request: Request object (required for the host name part of the URL). If this is not given, the current request is used. """
if request is None: request = get_current_request() # cnv = request.registry.getAdapter(request, IResourceUrlConverter) reg = get_current_registry() cnv = reg.getAdapter(request, IResourceUrlConverter) return cnv.url_to_resource(url)
<SYSTEM_TASK:> Returns the entity class registered for the given registered resource. <END_TASK> <USER_TASK:> Description: def get_entity_class(resource): """ Returns the entity class registered for the given registered resource. :param resource: registered resource :type collection: class implementing or instance providing a registered resource interface. :return: entity class (class implementing `everest.entities.interfaces.IEntity`) """
reg = get_current_registry() if IInterface in provided_by(resource): ent_cls = reg.getUtility(resource, name='entity-class') else: ent_cls = reg.getAdapter(resource, IEntity, name='entity-class') return ent_cls
<SYSTEM_TASK:> install board with programmer. <END_TASK> <USER_TASK:> Description: def install_board_with_programmer(mcu, programmer, f_cpu=16000000, core='arduino', replace_existing=False, ): """install board with programmer."""
    bunch = AutoBunch()
    board_id = '{mcu}_{f_cpu}_{programmer}'.format(f_cpu=f_cpu,
                                                   mcu=mcu,
                                                   programmer=programmer,
                                                   )
    bunch.name = '{mcu}@{f} Prog:{programmer}'.format(f=strfreq(f_cpu),
                                                      mcu=mcu,
                                                      programmer=programmer,
                                                      )
    bunch.upload.using = programmer
    bunch.build.mcu = mcu
    bunch.build.f_cpu = str(f_cpu) + 'L'
    bunch.build.core = core
    install_board(board_id, bunch, replace_existing=replace_existing)
<SYSTEM_TASK:> logs a message and prints it to the screen <END_TASK> <USER_TASK:> Description: def logMsg(self, msg, printMsg=True): """ logs a message and prints it to the screen """
    time = datetime.datetime.now().strftime('%I:%M %p')
    self.log = '{0}\n{1} | {2}'.format(self.log, time, msg)

    if printMsg:
        print msg

    if self.addLogsToArcpyMessages:
        from arcpy import AddMessage
        AddMessage(msg)
<SYSTEM_TASK:> logs the arcpy messages and prints them to the screen <END_TASK> <USER_TASK:> Description: def logGPMsg(self, printMsg=True): """ logs the arcpy messages and prints them to the screen """
from arcpy import GetMessages msgs = GetMessages() try: self.logMsg(msgs, printMsg) except: self.logMsg('error getting arcpy message', printMsg)
<SYSTEM_TASK:> writes the log to a <END_TASK> <USER_TASK:> Description: def writeLogToFile(self): """ writes the log to a """
    if not os.path.exists(self.logFolder):
        os.mkdir(self.logFolder)

    with open(self.logFile, mode='a') as f:
        f.write('\n\n' + self.log)
<SYSTEM_TASK:> Return the URL of a random GIF related to the phrase, if possible <END_TASK> <USER_TASK:> Description: def get_random_giphy(phrase): """Return the URL of a random GIF related to the phrase, if possible"""
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        giphy = giphypop.Giphy()
        results = giphy.search_list(phrase=phrase, limit=100)

    if not results:
        raise ValueError('There were no results for that phrase')

    return random.choice(results).media_url
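A hedged usage sketch; it needs the giphypop package and network access, and the phrase is arbitrary:

    try:
        url = get_random_giphy('celebration')
        print(url)
    except ValueError:
        print('no GIFs found for that phrase')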
<SYSTEM_TASK:> Display an image for the phrase in sys.argv, if possible <END_TASK> <USER_TASK:> Description: def handle_command_line(): """Display an image for the phrase in sys.argv, if possible"""
phrase = ' '.join(sys.argv[1:]) or 'random' try: giphy = get_random_giphy(phrase) except ValueError: sys.stderr.write('Unable to find any GIFs for {!r}\n'.format(phrase)) sys.exit(1) display(fetch_image(giphy))
<SYSTEM_TASK:> Makes all folders declared in the config if they <END_TASK> <USER_TASK:> Description: def make_required_folders(self): """Makes all folders declared in the config if they do not exist. """
    for folder in [
        self.pending_folder,
        self.usb_incoming_folder,
        self.outgoing_folder,
        self.incoming_folder,
        self.archive_folder,
        self.tmp_folder,
        self.log_folder,
    ]:
        if not os.path.exists(folder):
            os.makedirs(folder)
<SYSTEM_TASK:> Return a list of sham.network.interfaces.NetworkInterface <END_TASK> <USER_TASK:> Description: def get_interfaces(self): """ Return a list of sham.network.interfaces.NetworkInterface describing all the interfaces this VM has """
    interfaces = self.xml.find('devices').iter('interface')
    iobjs = []
    for interface in interfaces:
        _type = interface.attrib['type']
        mac = interface.find('mac').attrib['address']
        source = interface.find('source').attrib[_type]
        model = interface.find('model').attrib['type']
        iobjs.append(NetworkInterface(_type, mac, source, model))
    return iobjs
<SYSTEM_TASK:> Return a list of all the Disks attached to this VM <END_TASK> <USER_TASK:> Description: def get_disks(self): """ Return a list of all the Disks attached to this VM The disks are returned in a sham.storage.volumes.Volume object """
disks = [disk for disk in self.xml.iter('disk')] disk_objs = [] for disk in disks: source = disk.find('source') if source is None: continue path = source.attrib['file'] diskobj = self.domain.connect().storageVolLookupByPath(path) disk_objs.append(diskobj) return [Volume(d, StoragePool(d.storagePoolLookupByVolume())) for d in disk_objs]
<SYSTEM_TASK:> Delete this VM, and remove all its disks <END_TASK> <USER_TASK:> Description: def delete(self): """ Delete this VM, and remove all its disks """
    disks = self.get_disks()
    self.domain.undefine()
    for disk in disks:
        disk.wipe()
        disk.delete()
<SYSTEM_TASK:> Return the values contained in this object as a dict <END_TASK> <USER_TASK:> Description: def to_dict(self): """ Return the values contained in this object as a dict """
return {'domain_type': self.domain_type, 'max_memory': self.max_memory, 'current_memory': self.current_memory, 'num_cpus': self.num_cpus, 'running': self.is_running(), 'name': self.name, }
<SYSTEM_TASK:> Given a url, try to guess what kind of VCS it's for. Return None if we <END_TASK> <USER_TASK:> Description: def guess_url_vcs(url): """ Given a url, try to guess what kind of VCS it's for. Return None if we can't make a good guess. """
    parsed = urllib.parse.urlsplit(url)
    if parsed.scheme in ('git', 'svn'):
        return parsed.scheme
    elif parsed.path.endswith('.git'):
        return 'git'
    elif parsed.hostname == 'github.com':
        return 'git'

    # If it's an http url, we can try requesting it and guessing from the
    # contents.
    if parsed.scheme in ('http', 'https'):
        resp = requests.get(url)
        # Guard against a missing Server header before calling .lower().
        server = resp.headers.get('server', '')
        if re.match('basehttp.*python.*', server.lower()):
            # It's the mercurial http server
            return 'hg'
    return None
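A few calls that exercise the cheap, non-HTTP branches above (the URLs are illustrative):

    assert guess_url_vcs('git://example.com/project') == 'git'
    assert guess_url_vcs('svn://example.com/project') == 'svn'
    assert guess_url_vcs('https://example.com/project.git') == 'git'
    assert guess_url_vcs('https://github.com/user/project') == 'git'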
<SYSTEM_TASK:> Given a path for a folder on the local filesystem, see what kind of vcs <END_TASK> <USER_TASK:> Description: def guess_folder_vcs(folder): """ Given a path for a folder on the local filesystem, see what kind of vcs repo it is, if any. """
    try:
        contents = os.listdir(folder)
        vcs_folders = ['.git', '.hg', '.svn']
        found = next((x for x in vcs_folders if x in contents), None)
        # Chop off the dot if we got a string back
        return found[1:] if found else None
    except OSError:
        return None
<SYSTEM_TASK:> Return the name of the folder that you'd get if you cloned 'url' into the <END_TASK> <USER_TASK:> Description: def basename(url): """ Return the name of the folder that you'd get if you cloned 'url' into the current working directory. """
# It's easy to accidentally have whitespace on the beginning or end of the # url. url = url.strip() url, _sep, _fragment = url.partition('#') # Remove trailing slash from url if present if url.endswith('/'): url = url[:-1] # Also strip .git from url if it ends in that. return re.sub(r'\.git$', '', url.split('/')[-1])
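For example:

    assert basename('https://github.com/user/repo.git') == 'repo'
    assert basename(' https://example.com/code/project/ ') == 'project'
    assert basename('https://example.com/proj#egg=proj') == 'proj'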
<SYSTEM_TASK:> Assuming that the repo has been cloned locally, get its default <END_TASK> <USER_TASK:> Description: def get_url(self): """ Assuming that the repo has been cloned locally, get its default upstream URL. """
    cmd = {
        'hg': 'hg paths default',
        'git': 'git config --local --get remote.origin.url',
    }[self.vcs_type]
    with chdir(self.folder):
        r = self.run(cmd)
        return r.output.replace('\n', '')
<SYSTEM_TASK:> Returns an absolute URL matching given view with its parameters. <END_TASK> <USER_TASK:> Description: def fburl(parser, token): """ Returns an absolute URL matching given view with its parameters. This is a way to define links that aren't tied to a particular URL configuration:: {% url path.to.some_view arg1,arg2,name1=value1 %} The first argument is a path to a view. It can be an absolute python path or just ``app_name.view_name`` without the project name if the view is located inside the project. Other arguments are comma-separated values that will be filled in place of positional and keyword arguments in the URL. All arguments for the URL should be present. For example if you have a view ``app_name.client`` taking client's id and the corresponding line in a URLconf looks like this:: ('^client/(\d+)/$', 'app_name.client') and this app's URLconf is included into the project's URLconf under some path:: ('^clients/', include('project_name.app_name.urls')) then in a template you can create a link for a certain client like this:: {% url app_name.client client.id %} The URL will look like ``/clients/client/123/``. """
bits = token.contents.split(' ') if len(bits) < 2: raise template.TemplateSyntaxError("'%s' takes at least one argument" " (path to a view)" % bits[0]) viewname = bits[1] args = [] kwargs = {} asvar = None if len(bits) > 2: bits = iter(bits[2:]) for bit in bits: if bit == 'as': asvar = bits.next() break else: for arg in bit.split(","): if '=' in arg: k, v = arg.split('=', 1) k = k.strip() kwargs[k] = parser.compile_filter(v) elif arg: args.append(parser.compile_filter(arg)) return URLNode(viewname, args, kwargs, asvar)
<SYSTEM_TASK:> Decorator executing method in directory 'dir'. <END_TASK> <USER_TASK:> Description: def chdir(method): """Decorator executing method in directory 'dir'. """
    def wrapper(self, dir, *args, **kw):
        dirstack = ChdirStack()
        dirstack.push(dir)
        try:
            return method(self, dir, *args, **kw)
        finally:
            dirstack.pop()
    return functools.wraps(method)(wrapper)
<SYSTEM_TASK:> Pop dir off stack and change to it. <END_TASK> <USER_TASK:> Description: def pop(self): """Pop dir off stack and change to it. """
if len(self.stack): os.chdir(self.stack.pop())
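A hedged sketch of applying the `chdir` decorator defined above; the Builder class and its build method are hypothetical:

    import os

    class Builder(object):

        @chdir
        def build(self, dir, target):
            # Runs with `dir` as the working directory, restored afterwards.
            return os.system('make %s' % target)

    Builder().build('/tmp/project', 'all')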
<SYSTEM_TASK:> Returns a matching target object for the given source ID. <END_TASK> <USER_TASK:> Description: def get_matching(self, source_id): """ Returns a matching target object for the given source ID. """
    value = self._accessor.get_by_id(source_id)
    if not value is None:
        reg = get_current_registry()
        prx_fac = reg.getUtility(IDataTraversalProxyFactory)
        prx = prx_fac.make_proxy(value,
                                 self._accessor,
                                 self.relationship_direction,
                                 self.relation_operation)
    else:
        prx = None
    return prx
<SYSTEM_TASK:> Returns an iterator of items for an attribute value map to use for <END_TASK> <USER_TASK:> Description: def update_attribute_value_items(self): """ Returns an iterator of items for an attribute value map to use for an UPDATE operation. The iterator ignores collection attributes as these are processed implicitly by the traversal algorithm. :returns: iterator yielding tuples with objects implementing :class:`everest.resources.interfaces.IResourceAttribute` as the first and the proxied attribute value as the second argument. """
for attr in self._attribute_iterator(): if attr.kind != RESOURCE_ATTRIBUTE_KINDS.COLLECTION: try: attr_val = self._get_proxied_attribute_value(attr) except AttributeError: continue else: yield (attr, attr_val)
<SYSTEM_TASK:> Returns the entity converted from the proxied data. <END_TASK> <USER_TASK:> Description: def get_entity(self): """ Returns the entity converted from the proxied data. """
if self._accessor is None: if self.__converted_entity is None: self.__converted_entity = self._convert_to_entity() else: # If we have an accessor, we can get the proxied entity by ID. # FIXME: This is a hack that is only used for REMOVE operations # with data elements. self.__converted_entity = \ self.get_matching(self.get_id()).get_entity() return self.__converted_entity
<SYSTEM_TASK:> Factory method to create a tree traverser depending on the input <END_TASK> <USER_TASK:> Description: def make_traverser(cls, source_data, target_data, relation_operation, accessor=None, manage_back_references=True): """ Factory method to create a tree traverser depending on the input source and target data combination. :param source_data: Source data. :param target_target: Target data. :param str relation_operation: Relation operation. On of the constants defined in :class:`everest.constants.RELATION_OPERATIONS`. :param accessor: Accessor for looking up target nodes for update operations. :param bool manage_back_references: If set, backreferences will automatically be updated in the target data. """
reg = get_current_registry() prx_fac = reg.getUtility(IDataTraversalProxyFactory) if relation_operation == RELATION_OPERATIONS.ADD \ or relation_operation == RELATION_OPERATIONS.UPDATE: if relation_operation == RELATION_OPERATIONS.ADD \ and not target_data is None: raise ValueError('Must not provide target data with ' 'relation operation ADD.') source_proxy = prx_fac.make_proxy(source_data, None, RELATIONSHIP_DIRECTIONS.NONE, relation_operation) source_is_sequence = \ source_proxy.proxy_for == RESOURCE_KINDS.COLLECTION if not source_is_sequence: source_id = source_proxy.get_id() else: source_proxy = None source_is_sequence = False if relation_operation == RELATION_OPERATIONS.REMOVE \ or relation_operation == RELATION_OPERATIONS.UPDATE: rel_dir = RELATIONSHIP_DIRECTIONS.BIDIRECTIONAL if not manage_back_references: rel_dir &= ~RELATIONSHIP_DIRECTIONS.REVERSE if relation_operation == RELATION_OPERATIONS.REMOVE: if not source_data is None: raise ValueError('Must not provide source data with ' 'relation operation REMOVE.') target_proxy = prx_fac.make_proxy(target_data, accessor, rel_dir, relation_operation) else: # UPDATE if accessor is None: raise ValueError('Need to provide an accessor when ' 'performing UPDATE operations.') if not target_data is None: target_root = target_data elif not source_is_sequence: # Look up the (single) target to update. target_root = accessor.get_by_id(source_id) if target_root is None: raise ValueError('Entity with ID %s to update not ' 'found.' % source_id) else: # Look up collection of targets to update. target_root = [] for src_prx in source_proxy: tgt_ent_id = src_prx.get_id() if tgt_ent_id is None: continue tgt_ent = accessor.get_by_id(tgt_ent_id) if tgt_ent is None: continue target_root.append(tgt_ent) target_proxy = prx_fac.make_proxy(target_root, accessor, rel_dir, relation_operation) target_is_sequence = \ target_proxy.proxy_for == RESOURCE_KINDS.COLLECTION else: target_proxy = None target_is_sequence = False if not source_proxy is None and not target_proxy is None: # Check for source/target consistency. if not ((source_is_sequence and target_is_sequence) or (not source_is_sequence and not target_is_sequence)): raise ValueError('When both source and target root nodes are ' 'given, they can either both be sequences ' 'or both not be sequences.') return cls(source_proxy, target_proxy)
<SYSTEM_TASK:> Loads all plugins advertising entry points with the given group name. <END_TASK> <USER_TASK:> Description: def load_all(self, group): """ Loads all plugins advertising entry points with the given group name. The specified plugin needs to be a callable that accepts the everest configurator as single argument. """
for ep in iter_entry_points(group=group): plugin = ep.load() plugin(self.__config)
<SYSTEM_TASK:> remove libraries by GUI. <END_TASK> <USER_TASK:> Description: def gui(): """remove libraries by GUI."""
sel = psidialogs.multi_choice(libraries(), 'select libraries to remove from %s!' % libraries_dir(), title='remove boards') print('%s selected' % sel) if sel: if psidialogs.ask_yes_no('Do you really want to remove selected libraries?\n' + '\n'.join(sel)): for x in sel: remove_lib(x) print('%s was removed' % x)
<SYSTEM_TASK:> Return ast.Expression created from source code given in `src`. <END_TASK> <USER_TASK:> Description: def src2ast(src: str) -> Expression: """Return ast.Expression created from source code given in `src`."""
    try:
        return ast.parse(src, mode='eval')
    except SyntaxError:
        raise ValueError("Not a valid expression.") from None
<SYSTEM_TASK:> Replace all Name nodes named `old_name` with nodes named `new_name`. <END_TASK> <USER_TASK:> Description: def replace_name(expr: AST, old_name: str, new_name: str) -> AST: """Replace all Name nodes named `old_name` with nodes named `new_name`."""
return _NameReplacer(old_name, new_name).visit(deepcopy(expr))
<SYSTEM_TASK:> Return expression which is the negation of `expr`. <END_TASK> <USER_TASK:> Description: def Negation(expr: Expression) -> Expression: """Return expression which is the negation of `expr`."""
expr = Expression(_negate(expr.body)) return ast.fix_missing_locations(expr)
<SYSTEM_TASK:> Return expression which is the conjunction of `expr1` and `expr2`. <END_TASK> <USER_TASK:> Description: def Conjunction(expr1: Expression, expr2: Expression) -> Expression: """Return expression which is the conjunction of `expr1` and `expr2`."""
expr = Expression(ast.BoolOp(ast.And(), [expr1.body, expr2.body])) return ast.fix_missing_locations(expr)
<SYSTEM_TASK:> Return expression which is the disjunction of `expr1` and `expr2`. <END_TASK> <USER_TASK:> Description: def Disjunction(expr1: Expression, expr2: Expression) -> Expression: """Return expression which is the disjunction of `expr1` and `expr2`."""
expr = Expression(ast.BoolOp(ast.Or(), [expr1.body, expr2.body])) return ast.fix_missing_locations(expr)
<SYSTEM_TASK:> Return expression which is the contradiction of `expr1` and `expr2`. <END_TASK> <USER_TASK:> Description: def Contradiction(expr1: Expression, expr2: Expression) -> Expression: """Return expression which is the contradiction of `expr1` and `expr2`."""
expr = Disjunction(Conjunction(expr1, Negation(expr2)), Conjunction(Negation(expr1), expr2)) return ast.fix_missing_locations(expr)
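A small usage sketch combining the helpers above; the expressions and values are illustrative, and it assumes `_negate` produces the logical negation promised by the Negation docstring:

    expr1 = src2ast('a > 0')
    expr2 = src2ast('b < 5')
    combined = Conjunction(expr1, Negation(expr2))
    code = compile(combined, '<expr>', mode='eval')
    print(eval(code, {'a': 1, 'b': 7}))   # True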
<SYSTEM_TASK:> Return the difference betweens the binding levels of the current <END_TASK> <USER_TASK:> Description: def diff_binding(self) -> int: """Return the difference betweens the binding levels of the current and the previous operator. """
    try:
        prev_op, prev_op_binding = self.nested_ops[-2]
    except IndexError:
        prev_op, prev_op_binding = None, 0
    try:
        curr_op, curr_op_binding = self.nested_ops[-1]
    except IndexError:
        curr_op, curr_op_binding = None, 0
    # special case
    if prev_op is ast.Pow and isinstance(curr_op, (ast.Invert, ast.USub)):
        return 1
    # print(prev_op, prev_op_binding, curr_op, curr_op_binding)
    return curr_op_binding - prev_op_binding
<SYSTEM_TASK:> Process `node` by dispatching to a handler. <END_TASK> <USER_TASK:> Description: def visit(self, node: AST, dfltChaining: bool = True) -> str: """Process `node` by dispatching to a handler."""
# print(node.__class__.__name__) if node is None: return '' if isinstance(node, ast.Expression): return self.visit(node.body) # dispatch to specific or generic method method = 'visit_' + node.__class__.__name__ visitor = getattr(self, method, self.generic_visit) return visitor(node, dfltChaining)
<SYSTEM_TASK:> Default handler, called if no explicit visitor function exists for <END_TASK> <USER_TASK:> Description: def generic_visit(self, node: AST, dfltChaining: bool = True) -> str: """Default handler, called if no explicit visitor function exists for a node. """
for field, value in ast.iter_fields(node): if isinstance(value, list): for item in value: if isinstance(item, AST): self.visit(item) elif isinstance(value, AST): self.visit(value)
<SYSTEM_TASK:> Return `node`s value formatted according to its format spec. <END_TASK> <USER_TASK:> Description: def visit_FormattedValue(self, node: AST, dfltChaining: bool = True) -> str: """Return `node`s value formatted according to its format spec."""
format_spec = node.format_spec return f"{{{self.visit(node.value)}" \ f"{self.CONV_MAP.get(node.conversion, '')}" \ f"{':'+self._nested_str(format_spec) if format_spec else ''}}}"
<SYSTEM_TASK:> Return representation of starred expresssion. <END_TASK> <USER_TASK:> Description: def visit_Starred(self, node: AST, dfltChaining: bool = True) -> str: """Return representation of starred expresssion."""
with self.op_man(node): return f"*{self.visit(node.value)}"
<SYSTEM_TASK:> Return representation of nested expression. <END_TASK> <USER_TASK:> Description: def visit_Expr(self, node: AST, dfltChaining: bool = True) -> str: """Return representation of nested expression."""
return self.visit(node.value)
<SYSTEM_TASK:> Return representation of `node`s operator and operand. <END_TASK> <USER_TASK:> Description: def visit_UnaryOp(self, node: AST, dfltChaining: bool = True) -> str: """Return representation of `node`s operator and operand."""
op = node.op with self.op_man(op): return self.visit(op) + self.visit(node.operand)
<SYSTEM_TASK:> Return `node`s operators and operands as inlined expression. <END_TASK> <USER_TASK:> Description: def visit_Compare(self, node: AST, dfltChaining: bool = True) -> str: """Return `node`s operators and operands as inlined expression."""
# all comparison operators have the same precedence, # we just take the first one as representative first_op = node.ops[0] with self.op_man(first_op): cmps = [' '.join((self.visit(op), self.visit(cmp, dfltChaining=False))) for op, cmp in zip(node.ops, node.comparators)] src = ' '.join((self.visit(node.left), ' '.join(cmps))) return self.wrap_expr(src, dfltChaining)
<SYSTEM_TASK:> Return representation of `node` as keyword arg. <END_TASK> <USER_TASK:> Description: def visit_keyword(self, node: AST, dfltChaining: bool = True) -> str: """Return representation of `node` as keyword arg."""
    arg = node.arg
    if arg is None:
        return f"**{self.visit(node.value)}"
    else:
        return f"{arg}={self.visit(node.value)}"
<SYSTEM_TASK:> Return `node`s representation as argument list. <END_TASK> <USER_TASK:> Description: def visit_arguments(self, node: AST, dfltChaining: bool = True) -> str: """Return `node`s representation as argument list."""
args = node.args dflts = node.defaults vararg = node.vararg kwargs = node.kwonlyargs kwdflts = node.kw_defaults kwarg = node.kwarg self.compact = True n_args_without_dflt = len(args) - len(dflts) args_src = (arg.arg for arg in args[:n_args_without_dflt]) dflts_src = (f"{arg.arg}={self.visit(dflt)}" for arg, dflt in zip(args[n_args_without_dflt:], dflts)) vararg_src = (f"*{vararg.arg}",) if vararg else () kwargs_src = ((f"{kw.arg}={self.visit(dflt)}" if dflt is not None else f"{kw.arg}") for kw, dflt in zip(kwargs, kwdflts)) kwarg_src = (f"**{kwarg.arg}",) if kwarg else () src = ', '.join(chain(args_src, dflts_src, vararg_src, kwargs_src, kwarg_src)) self.compact = False return src
<SYSTEM_TASK:> Return `node`s representation as lambda expression. <END_TASK> <USER_TASK:> Description: def visit_Lambda(self, node: AST, dfltChaining: bool = True) -> str: """Return `node`s representation as lambda expression."""
with self.op_man(node): src = f"lambda {self.visit(node.args)}: {self.visit(node.body)}" return self.wrap_expr(src, dfltChaining)
<SYSTEM_TASK:> Return `node`s representation as attribute access. <END_TASK> <USER_TASK:> Description: def visit_Attribute(self, node: AST, dfltChaining: bool = True) -> str: """Return `node`s representation as attribute access."""
return '.'.join((self.visit(node.value), node.attr))
<SYSTEM_TASK:> Return `node`s representation as slice. <END_TASK> <USER_TASK:> Description: def visit_Slice(self, node: AST, dfltChaining: bool = True) -> str: """Return `node`s representation as slice."""
elems = [self.visit(node.lower), self.visit(node.upper)] if node.step is not None: elems.append(self.visit(node.step)) return ':'.join(elems)
<SYSTEM_TASK:> Return `node`s representation as extended slice. <END_TASK> <USER_TASK:> Description: def visit_ExtSlice(self, node: AST, dfltChaining: bool = True) -> str: """Return `node`s representation as extended slice."""
return ', '.join((self.visit(dim) for dim in node.dims))
<SYSTEM_TASK:> Return `node`s representation as generator expression. <END_TASK> <USER_TASK:> Description: def visit_GeneratorExp(self, node: AST, dfltChaining: bool = True) -> str: """Return `node`s representation as generator expression."""
return f"({self.visit(node.elt)} " \ f"{' '.join(self.visit(gen) for gen in node.generators)})"
<SYSTEM_TASK:> Return the visible width of the text in line buffer up to position. <END_TASK> <USER_TASK:> Description: def visible_line_width(self, position = Point): """Return the visible width of the text in line buffer up to position."""
extra_char_width = len([ None for c in self[:position].line_buffer if 0x2013 <= ord(c) <= 0xFFFD]) return len(self[:position].quoted_text()) + self[:position].line_buffer.count(u"\t")*7 + extra_char_width
<SYSTEM_TASK:> Run git add and commit with message if provided. <END_TASK> <USER_TASK:> Description: def run(self): """Run git add and commit with message if provided."""
if os.system('git add .'): sys.exit(1) if self.message is not None: os.system('git commit -a -m "' + self.message + '"') else: os.system('git commit -a')
<SYSTEM_TASK:> Set new uri value in record. <END_TASK> <USER_TASK:> Description: def uri(self, value): """Set new uri value in record. It will not change the location of the underlying file! """
jsonpointer.set_pointer(self.record, self.pointer, value)
<SYSTEM_TASK:> Open file ``uri`` under the pointer. <END_TASK> <USER_TASK:> Description: def open(self, mode='r', **kwargs): """Open file ``uri`` under the pointer."""
_fs, filename = opener.parse(self.uri) return _fs.open(filename, mode=mode, **kwargs)
<SYSTEM_TASK:> Move file to a new destination and update ``uri``. <END_TASK> <USER_TASK:> Description: def move(self, dst, **kwargs): """Move file to a new destination and update ``uri``."""
_fs, filename = opener.parse(self.uri) _fs_dst, filename_dst = opener.parse(dst) movefile(_fs, filename, _fs_dst, filename_dst, **kwargs) self.uri = dst
<SYSTEM_TASK:> Create a new file from a string or file-like object. <END_TASK> <USER_TASK:> Description: def setcontents(self, source, **kwargs): """Create a new file from a string or file-like object."""
if isinstance(source, six.string_types): _file = opener.open(source, 'rb') else: _file = source # signals.document_before_content_set.send(self) data = _file.read() _fs, filename = opener.parse(self.uri) _fs.setcontents(filename, data, **kwargs) _fs.close() # signals.document_after_content_set.send(self) if isinstance(source, six.string_types) and hasattr(_file, 'close'): _file.close()
<SYSTEM_TASK:> Remove file reference from record. <END_TASK> <USER_TASK:> Description: def remove(self, force=False): """Remove file reference from record. If force is True it removes the file from filesystem """
if force: _fs, filename = opener.parse(self.uri) _fs.remove(filename) self.uri = None
<SYSTEM_TASK:> return the content of the XML document as a byte string suitable for writing <END_TASK> <USER_TASK:> Description: def tobytes( self, root=None, encoding='UTF-8', doctype=None, canonicalized=True, xml_declaration=True, pretty_print=True, with_comments=True, ): """return the content of the XML document as a byte string suitable for writing"""
if root is None: root = self.root if canonicalized == True: return self.canonicalized_bytes(root) else: return etree.tostring( root, encoding=encoding or self.info.encoding, doctype=doctype or self.info.doctype, xml_declaration=xml_declaration, pretty_print=pretty_print, with_comments=with_comments, )
<SYSTEM_TASK:> return the content of the XML document as a unicode string <END_TASK> <USER_TASK:> Description: def tostring(self, root=None, doctype=None, pretty_print=True): """return the content of the XML document as a unicode string"""
if root is None: root = self.root return etree.tounicode( root, doctype=doctype or self.info.doctype, pretty_print=pretty_print )
<SYSTEM_TASK:> calculate a digest based on the hash of the XML content <END_TASK> <USER_TASK:> Description: def digest(self, **args): """calculate a digest based on the hash of the XML content"""
return String(XML.canonicalized_string(self.root)).digest(**args)
<SYSTEM_TASK:> given a tag in xpath form and optional attributes, find the element in self.root or return a new one. <END_TASK> <USER_TASK:> Description: def element(self, tag_path, test=None, **attributes): """given a tag in xpath form and optional attributes, find the element in self.root or return a new one."""
xpath = tag_path tests = ["@%s='%s'" % (k, attributes[k]) for k in attributes] if test is not None: tests.insert(0, test) if len(tests) > 0: xpath += "[%s]" % ' and '.join(tests) e = self.find(self.root, xpath) if e is None: tag = tag_path.split('/')[-1].split('[')[0] tagname = tag.split(':')[-1] if ':' in tag: nstag = tag.split(':')[0] tag = "{%s}%s" % (self.NS[nstag], tagname) e = etree.Element(tag, **attributes) return e
<SYSTEM_TASK:> return the URL, if any, for the doc root or elem, if given. <END_TASK> <USER_TASK:> Description: def namespace(self, elem=None): """return the URL, if any, for the doc root or elem, if given."""
if elem is None: elem = self.root return XML.tag_namespace(elem.tag)
<SYSTEM_TASK:> return the namespace for a given tag, or '' if no namespace given <END_TASK> <USER_TASK:> Description: def tag_namespace(cls, tag): """return the namespace for a given tag, or '' if no namespace given"""
    md = re.match(r"^(?:\{([^\}]*)\})", tag)
    if md is not None:
        return md.group(1)
<SYSTEM_TASK:> return the name of the tag, with the namespace removed <END_TASK> <USER_TASK:> Description: def tag_name(cls, tag): """return the name of the tag, with the namespace removed"""
while isinstance(tag, etree._Element): tag = tag.tag return tag.split('}')[-1]
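For example, given a namespaced lxml tag, the two class helpers above return:

    tag = '{http://www.w3.org/1999/xhtml}body'
    XML.tag_namespace(tag)   # 'http://www.w3.org/1999/xhtml'
    XML.tag_name(tag)        # 'body'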
<SYSTEM_TASK:> return a dict of element tags, their attribute names, and optionally attribute values, <END_TASK> <USER_TASK:> Description: def element_map( self, tags=None, xpath="//*", exclude_attribs=[], include_attribs=[], attrib_vals=False, hierarchy=False, minimize=False, ): """return a dict of element tags, their attribute names, and optionally attribute values, in the XML document """
if tags is None: tags = Dict() for elem in self.root.xpath(xpath): if elem.tag not in tags.keys(): tags[elem.tag] = Dict(**{'parents': [], 'children': [], 'attributes': Dict()}) for a in [ a for a in elem.attrib.keys() if (include_attribs == [] and a not in exclude_attribs) or (a in include_attribs) ]: # Attribute Names if a not in tags[elem.tag].attributes.keys(): tags[elem.tag].attributes[a] = [] # Attribute Values if attrib_vals == True and elem.get(a) not in tags[elem.tag].attributes[a]: tags[elem.tag].attributes[a].append(elem.get(a)) # Hierarchy: Parents and Children if hierarchy == True: parent = elem.getparent() if parent is not None and parent.tag not in tags[elem.tag].parents: tags[elem.tag].parents.append(parent.tag) for child in elem.xpath("*"): if child.tag not in tags[elem.tag].children: tags[elem.tag].children.append(child.tag) if minimize == True: for tag in tags.keys(): if tags[tag].get('parents') == []: tags[tag].pop('parents') if tags[tag].get('children') == []: tags[tag].pop('children') if tags[tag].get('attributes') == {}: tags[tag].pop('attributes') if tags[tag] == {}: tags.pop(tag) return tags
<SYSTEM_TASK:> convert a dict key into an element or attribute name <END_TASK> <USER_TASK:> Description: def dict_key_tag(Class, key, namespaces=None): """convert a dict key into an element or attribute name"""
namespaces = namespaces or Class.NS ns = Class.tag_namespace(key) tag = Class.tag_name(key) if ns is None and ':' in key: prefix, tag = key.split(':') if prefix in namespaces.keys(): ns = namespaces[prefix] if ns is not None: tag = "{%s}%s" % (ns, tag) return tag
<SYSTEM_TASK:> delete everything from elem to end_elem, including elem. <END_TASK> <USER_TASK:> Description: def remove_range(cls, elem, end_elem, delete_end=True): """delete everything from elem to end_elem, including elem. if delete_end==True, also including end_elem; otherwise, leave it."""
while elem is not None and elem != end_elem and end_elem not in elem.xpath("descendant::*"): parent = elem.getparent() nxt = elem.getnext() parent.remove(elem) if DEBUG == True: print(etree.tounicode(elem)) elem = nxt if elem == end_elem: if delete_end == True: cls.remove(end_elem, leave_tail=True) elif elem is None: if parent.tail not in [None, '']: parent.tail = '' cls.remove_range(parent.getnext(), end_elem) XML.remove_if_empty(parent) elif end_elem in elem.xpath("descendant::*"): if DEBUG == True: print(elem.text) elem.text = '' cls.remove_range(elem.getchildren()[0], end_elem) XML.remove_if_empty(elem) else: print("LOGIC ERROR", file=sys.stderr)
<SYSTEM_TASK:> Within a given node, merge elements that are next to each other <END_TASK> <USER_TASK:> Description: def merge_contiguous(C, node, xpath, namespaces=None): """Within a given node, merge elements that are next to each other if they have the same tag and attributes. """
new_node = deepcopy(node) elems = XML.xpath(new_node, xpath, namespaces=namespaces) elems.reverse() for elem in elems: nxt = elem.getnext() if elem.attrib == {}: XML.replace_with_contents(elem) elif ( elem.tail in [None, ''] and nxt is not None and elem.tag == nxt.tag and elem.attrib == nxt.attrib ): # merge nxt with elem # -- append nxt.text to elem last child tail if len(elem.getchildren()) > 0: lastch = elem.getchildren()[-1] lastch.tail = (lastch.tail or '') + (nxt.text or '') else: elem.text = (elem.text or '') + (nxt.text or '') # -- append nxt children to elem children for ch in nxt.getchildren(): elem.append(ch) # -- remove nxt XML.remove(nxt, leave_tail=True) return new_node
<SYSTEM_TASK:> unnest the element from its parent within doc. MUTABLE CHANGES <END_TASK> <USER_TASK:> Description: def unnest(c, elem, ignore_whitespace=False): """unnest the element from its parent within doc. MUTABLE CHANGES"""
parent = elem.getparent() gparent = parent.getparent() index = parent.index(elem) # put everything up to elem into a new parent element right before the current parent preparent = etree.Element(parent.tag) preparent.text, parent.text = (parent.text or ''), '' for k in parent.attrib.keys(): preparent.set(k, parent.get(k)) if index > 0: for ch in parent.getchildren()[:index]: preparent.append(ch) gparent.insert(gparent.index(parent), preparent) XML.remove_if_empty(preparent, leave_tail=True, ignore_whitespace=ignore_whitespace) # put the element right before the current parent XML.remove(elem, leave_tail=True) gparent.insert(gparent.index(parent), elem) elem.tail = '' # if the original parent is empty, remove it XML.remove_if_empty(parent, leave_tail=True, ignore_whitespace=ignore_whitespace)
<SYSTEM_TASK:> for elem1 containing elements at xpath, embed elem1 inside each of those elements, <END_TASK> <USER_TASK:> Description: def interior_nesting(cls, elem1, xpath, namespaces=None): """for elem1 containing elements at xpath, embed elem1 inside each of those elements, and then remove the original elem1"""
for elem2 in elem1.xpath(xpath, namespaces=namespaces): child_elem1 = etree.Element(elem1.tag) for k in elem1.attrib: child_elem1.set(k, elem1.get(k)) child_elem1.text, elem2.text = elem2.text, '' for ch in elem2.getchildren(): child_elem1.append(ch) elem2.insert(0, child_elem1) XML.replace_with_contents(elem1)
<SYSTEM_TASK:> for elem1 containing elements with tag2, <END_TASK> <USER_TASK:> Description: def fragment_nesting(cls, elem1, tag2, namespaces=None): """for elem1 containing elements with tag2, fragment elem1 into elems that are adjacent to and nested within tag2"""
elems2 = elem1.xpath("child::%s" % tag2, namespaces=namespaces) while len(elems2) > 0: elem2 = elems2[0] parent2 = elem2.getparent() index2 = parent2.index(elem2) # all of elem2 has a new tag1 element embedded inside of it child_elem1 = etree.Element(elem1.tag) for k in elem1.attrib: child_elem1.set(k, elem1.get(k)) elem2.text, child_elem1.text = '', elem2.text for ch in elem2.getchildren(): child_elem1.append(ch) elem2.insert(0, child_elem1) # new_elem1 for all following children of parent2 new_elem1 = etree.Element(elem1.tag) for k in elem1.attrib: new_elem1.set(k, elem1.get(k)) new_elem1.text, elem2.tail = elem2.tail, '' for ch in parent2.getchildren()[index2 + 1 :]: new_elem1.append(ch) # elem2 is placed after parent2 parent = parent2.getparent() parent.insert(parent.index(parent2) + 1, elem2) last_child = elem2 # new_elem1 is placed after elem2 parent.insert(parent.index(elem2) + 1, new_elem1) new_elem1.tail, elem1.tail = elem1.tail, '' XML.remove_if_empty(elem1) XML.remove_if_empty(new_elem1) # repeat until all tag2 elements are unpacked from the new_elem1 elem1 = new_elem1 elems2 = elem1.xpath("child::%s" % tag2, namespaces=namespaces)
<SYSTEM_TASK:> Return the Visibility State of the Users Profile <END_TASK> <USER_TASK:> Description: def communityvisibilitystate(self): """Return the Visibility State of the Users Profile"""
if self._communityvisibilitystate == None: return None elif self._communityvisibilitystate in self.VisibilityState: return self.VisibilityState[self._communityvisibilitystate] else: #Invalid State return None
<SYSTEM_TASK:> given a dataflashlog in the format produced by Mission Planner, <END_TASK> <USER_TASK:> Description: def logpath2dt(filepath): """ given a dataflashlog in the format produced by Mission Planner, return a datetime which says when the file was downloaded from the APM """
    return datetime.datetime.strptime(
        re.match(r'.*/(.*) .*$', filepath).groups()[0],
        '%Y-%m-%d %H-%M')
<SYSTEM_TASK:> Read holidays from an iCalendar-format file. <END_TASK> <USER_TASK:> Description: def _read_holidays(self, filename): """ Read holidays from an iCalendar-format file. """
cal = Calendar.from_ical(open(filename, 'rb').read()) holidays = [] for component in cal.walk('VEVENT'): start = component.decoded('DTSTART') try: end = component.decoded('DTEND') except KeyError: # RFC allows DTEND to be missing if isinstance(start, datetime): # For DATETIME instances, the event ends immediately. end = start elif isinstance(start, date): # For DATE instances, the event ends tomorrow end = start + timedelta(days=1) else: raise KeyError, 'DTEND is missing and DTSTART is not of DATE or DATETIME type' if isinstance(start, date) and not isinstance(start, datetime): assert (isinstance(end, date) and not isinstance(end, datetime)), \ 'DTSTART is of DATE type but DTEND is not of DATE type (got %r instead)' % type(end) # All-day event, set times to midnight local time start = datetime.combine(start, time.min) end = datetime.combine(end, time.min) # check for TZ data if start.tzinfo is None or end.tzinfo is None: # One of them is missing tzinfo, replace both with this office's # local time. Assume standard time if ambiguous. start = self.tz.localize(start, is_dst=False) end = self.tz.localize(end, is_dst=False) yield (start, end)
<SYSTEM_TASK:> Finds if it is business hours in the given office. <END_TASK> <USER_TASK:> Description: def in_hours(self, office=None, when=None): """ Finds if it is business hours in the given office. :param office: Office ID to look up, or None to check if any office is in business hours. :type office: str or None :param datetime.datetime when: When to check the office is open, or None for now. :returns: True if it is business hours, False otherwise. :rtype: bool :raises KeyError: If the office is unknown. """
if when == None: when = datetime.now(tz=utc) if office == None: for office in self.offices.itervalues(): if office.in_hours(when): return True return False else: # check specific office return self.offices[office].in_hours(when)
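A hedged usage sketch; the office key 'sydney' and the timestamp are illustrative, and `hours` stands for an instance of the class defined above:

    from datetime import datetime
    from pytz import utc

    when = datetime(2019, 7, 1, 3, 30, tzinfo=utc)
    if hours.in_hours('sydney', when=when):
        print('Sydney office is open')
    if hours.in_hours():                      # is any office open right now?
        print('Somebody is at work')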
<SYSTEM_TASK:> Factory for creating file objects <END_TASK> <USER_TASK:> Description: def opener(mode='r'): """Factory for creating file objects Keyword Arguments: - mode -- A string indicating how the file is to be opened. Accepts the same values as the builtin open() function. - bufsize -- The file's desired buffer size. Accepts the same values as the builtin open() function. """
def open_file(f): if f is sys.stdout or f is sys.stdin: return f elif f == '-': return sys.stdin if 'r' in mode else sys.stdout elif f.endswith('.bz2'): return bz2.BZ2File(f, mode) elif f.endswith('.gz'): return gzip.open(f, mode) else: return open(f, mode) return open_file
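For instance, the factory above can be handed to argparse as a type, or called directly; the filenames are illustrative:

    write_gz = opener('wt')           # build a text-mode opener
    fh = write_gz('out.txt.gz')       # transparently wraps gzip.open
    fh.write('hello\n')
    fh.close()
    out = opener('w')('-')            # '-' in write mode maps to sys.stdout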
<SYSTEM_TASK:> List details for a specific tenant id <END_TASK> <USER_TASK:> Description: def get(self, id, no_summary=False): """ List details for a specific tenant id """
resp = self.client.accounts.get(id) if no_summary: return self.display(resp) results = [] # Get a list of all volumes for this tenant id client = LunrClient(self.get_admin(), debug=self.debug) volumes = client.volumes.list(account_id=resp['id']) #volumes = self.client.volumes.list(resp['id']) for volume in volumes: if volume['status'] == 'DELETED': continue results.append(volume) self.display(resp, ['name', 'status', 'last_modified', 'created_at']) if results: return self.display(response(results, 200), ['id', 'status', 'size']) else: print("-- This account has no active volumes --") print("\nThis is a summary, use --no-summary " "to see the entire response")
<SYSTEM_TASK:> Main method for debug purposes. <END_TASK> <USER_TASK:> Description: def main(): """Main method for debug purposes."""
parser = argparse.ArgumentParser() group_tcp = parser.add_argument_group('TCP') group_tcp.add_argument('--tcp', dest='mode', action='store_const', const=PROP_MODE_TCP, help="Set tcp mode") group_tcp.add_argument('--host', dest='hostname', help="Specify hostname", default='') group_tcp.add_argument('--port', dest='port', help="Specify port", default=23, type=int) group_serial = parser.add_argument_group('Serial') group_serial.add_argument('--serial', dest='mode', action='store_const', const=PROP_MODE_SERIAL, help="Set serial mode") group_serial.add_argument('--interface', dest='interface', help="Specify interface", default='') group_file = parser.add_argument_group('File') group_file.add_argument('--file', dest='mode', action='store_const', const=PROP_MODE_FILE, help="Set file mode") group_file.add_argument('--name', dest='file', help="Specify file name", default='') args = parser.parse_args() kwb = KWBEasyfire(args.mode, args.hostname, args.port, args.interface, 0, args.file) kwb.run_thread() time.sleep(5) kwb.stop_thread() print(kwb)
<SYSTEM_TASK:> Open a connection to the easyfire unit. <END_TASK> <USER_TASK:> Description: def _open_connection(self): """Open a connection to the easyfire unit."""
if (self._mode == PROP_MODE_SERIAL): self._serial = serial.Serial(self._serial_device, self._serial_speed) elif (self._mode == PROP_MODE_TCP): self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self._socket.connect((self._ip, self._port)) elif (self._mode == PROP_MODE_FILE): self._file = open(self._file_path, "r")
<SYSTEM_TASK:> Close the connection to the easyfire unit. <END_TASK> <USER_TASK:> Description: def _close_connection(self): """Close the connection to the easyfire unit."""
if (self._mode == PROP_MODE_SERIAL): self._serial.close() elif (self._mode == PROP_MODE_TCP): self._socket.close() elif (self._mode == PROP_MODE_FILE): self._file.close()
<SYSTEM_TASK:> Add a byte to the checksum. <END_TASK> <USER_TASK:> Description: def _add_to_checksum(self, checksum, value): """Add a byte to the checksum."""
    checksum = self._byte_rot_left(checksum, 1)
    checksum = checksum + value
    if (checksum > 255):
        checksum = checksum - 255
    self._debug(PROP_LOGLEVEL_TRACE, "C: " + str(checksum) + " V: " + str(value))
    return checksum
<SYSTEM_TASK:> Read a byte from input. <END_TASK> <USER_TASK:> Description: def _read_byte(self): """Read a byte from input."""
to_return = "" if (self._mode == PROP_MODE_SERIAL): to_return = self._serial.read(1) elif (self._mode == PROP_MODE_TCP): to_return = self._socket.recv(1) elif (self._mode == PROP_MODE_FILE): to_return = struct.pack("B", int(self._file.readline())) _LOGGER.debug("READ: " + str(ord(to_return))) self._logdata.append(ord(to_return)) if (len(self._logdata) > self._logdatalen): self._logdata = self._logdata[len(self._logdata) - self._logdatalen:] self._debug(PROP_LOGLEVEL_TRACE, "READ: " + str(ord(to_return))) return to_return
<SYSTEM_TASK:> Decode a signed short temperature as two bytes to a single number. <END_TASK> <USER_TASK:> Description: def _decode_temp(byte_1, byte_2): """Decode a signed short temperature as two bytes to a single number."""
temp = (byte_1 << 8) + byte_2 if (temp > 32767): temp = temp - 65536 temp = temp / 10 return temp
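A worked example of the decoding above, treating it as a plain function for illustration; the byte values are made up:

    _decode_temp(0x01, 0x2C)   # (0x01 << 8) + 0x2C = 300   ->  30.0 degrees
    _decode_temp(0xFF, 0x38)   # 65336 - 65536 = -200       -> -20.0 degrees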
<SYSTEM_TASK:> Read a packet from the input. <END_TASK> <USER_TASK:> Description: def _read_packet(self): """Read a packet from the input."""
status = STATUS_WAITING mode = 0 checksum = 0 checksum_calculated = 0 length = 0 version = 0 i = 0 cnt = 0 packet = bytearray(0) while (status != STATUS_PACKET_DONE): read = self._read_ord_byte() if (status != STATUS_CTRL_CHECKSUM and status != STATUS_SENSE_CHECKSUM): checksum_calculated = self._add_to_checksum(checksum_calculated, read) self._debug(PROP_LOGLEVEL_TRACE, "R: " + str(read)) self._debug(PROP_LOGLEVEL_TRACE, "S: " + str(status)) if (status == STATUS_WAITING): if (read == 2): status = STATUS_PRE_1 checksum_calculated = read else: status = STATUS_WAITING elif (status == STATUS_PRE_1): checksum = 0 if (read == 2): status = STATUS_SENSE_PRE_2 checksum_calculated = read elif (read == 0): status = STATUS_WAITING else: status = STATUS_CTRL_PRE_2 elif (status == STATUS_SENSE_PRE_2): length = read status = STATUS_SENSE_PRE_LENGTH elif (status == STATUS_SENSE_PRE_LENGTH): version = read status = STATUS_SENSE_PRE_3 elif (status == STATUS_SENSE_PRE_3): cnt = read i = 0 status = STATUS_SENSE_DATA elif (status == STATUS_SENSE_DATA): packet.append(read) i = i + 1 if (i == length): status = STATUS_SENSE_CHECKSUM elif (status == STATUS_SENSE_CHECKSUM): checksum = read mode = PROP_PACKET_SENSE status = STATUS_PACKET_DONE elif (status == STATUS_CTRL_PRE_2): version = read status = STATUS_CTRL_PRE_3 elif (status == STATUS_CTRL_PRE_3): cnt = read i = 0 length = 16 status = STATUS_CTRL_DATA elif (status == STATUS_CTRL_DATA): packet.append(read) i = i + 1 if (i == length): status = STATUS_CTRL_CHECKSUM elif (status == STATUS_CTRL_CHECKSUM): checksum = read mode = PROP_PACKET_CTRL status = STATUS_PACKET_DONE else: status = STATUS_WAITING self._debug(PROP_LOGLEVEL_DEBUG, "MODE: " + str(mode) + " Version: " + str(version) + " Checksum: " + str(checksum) + " / " + str(checksum_calculated) + " Count: " + str(cnt) + " Length: " + str(len(packet))) self._debug(PROP_LOGLEVEL_TRACE, "Packet: " + str(packet)) return (mode, version, packet)
<SYSTEM_TASK:> Decode a sense packet into the list of sensors. <END_TASK> <USER_TASK:> Description: def _decode_sense_packet(self, version, packet): """Decode a sense packet into the list of sensors."""
data = self._sense_packet_to_data(packet) offset = 4 i = 0 datalen = len(data) - offset - 6 temp_count = int(datalen / 2) temp = [] for i in range(temp_count): temp_index = i * 2 + offset temp.append(self._decode_temp(data[temp_index], data[temp_index + 1])) self._debug(PROP_LOGLEVEL_DEBUG, "T: " + str(temp)) for sensor in self._sense_sensor: if (sensor.sensor_type == PROP_SENSOR_TEMPERATURE): sensor.value = temp[sensor.index] elif (sensor.sensor_type == PROP_SENSOR_RAW): sensor.value = packet self._debug(PROP_LOGLEVEL_DEBUG, str(self))
<SYSTEM_TASK:> Decode a control packet into the list of sensors. <END_TASK> <USER_TASK:> Description: def _decode_ctrl_packet(self, version, packet): """Decode a control packet into the list of sensors."""
for i in range(5): input_bit = packet[i] self._debug(PROP_LOGLEVEL_DEBUG, "Byte " + str(i) + ": " + str((input_bit >> 7) & 1) + str((input_bit >> 6) & 1) + str((input_bit >> 5) & 1) + str((input_bit >> 4) & 1) + str((input_bit >> 3) & 1) + str((input_bit >> 2) & 1) + str((input_bit >> 1) & 1) + str(input_bit & 1)) for sensor in self._ctrl_sensor: if (sensor.sensor_type == PROP_SENSOR_FLAG): sensor.value = (packet[sensor.index // 8] >> (sensor.index % 8)) & 1 elif (sensor.sensor_type == PROP_SENSOR_RAW): sensor.value = packet
<SYSTEM_TASK:> Main thread that reads from input and populates the sensors. <END_TASK> <USER_TASK:> Description: def run(self): """Main thread that reads from input and populates the sensors."""
while (self._run_thread): (mode, version, packet) = self._read_packet() if (mode == PROP_PACKET_SENSE): self._decode_sense_packet(version, packet) elif (mode == PROP_PACKET_CTRL): self._decode_ctrl_packet(version, packet)
<SYSTEM_TASK:> Remove empty parameters from the dict <END_TASK> <USER_TASK:> Description: def unused(self, _dict): """ Remove empty parameters from the dict """
    # Iterate over a copy of the items; deleting from the dict while
    # iterating over it directly raises a RuntimeError on Python 3.
    for key, value in list(_dict.items()):
        if value is None:
            del _dict[key]
    return _dict
<SYSTEM_TASK:> Ensure the required items are in the dictionary <END_TASK> <USER_TASK:> Description: def required(self, method, _dict, require): """ Ensure the required items are in the dictionary """
for key in require: if key not in _dict: raise LunrError("'%s' is required argument for method '%s'" % (key, method))
<SYSTEM_TASK:> Only these items are allowed in the dictionary <END_TASK> <USER_TASK:> Description: def allowed(self, method, _dict, allow): """ Only these items are allowed in the dictionary """
for key in _dict.keys(): if key not in allow: raise LunrError("'%s' is not an argument for method '%s'" % (key, method))
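A hedged sketch of how the three validators above might back a client method; the method name and argument names are hypothetical:

    def create(self, **kwargs):
        self.required('create', kwargs, ['name', 'size'])
        self.allowed('create', kwargs, ['name', 'size', 'description'])
        return self.unused(kwargs)     # drop any parameters left as None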
<SYSTEM_TASK:> Returns the python module and obj given an event name <END_TASK> <USER_TASK:> Description: def parse_event_name(name): """Returns the python module and obj given an event name """
    try:
        app, event = name.split('.')
        return '{}.{}'.format(app, EVENTS_MODULE_NAME), event
    except ValueError:
        raise InvalidEventNameError(
            (u'The name "{}" is invalid. '
             u'Make sure you are using the "app.KlassName" format'
             ).format(name))
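For example (EVENTS_MODULE_NAME is a module-level constant defined elsewhere in this package, so the exact module string depends on its value):

    parse_event_name('account.UserCreated')
    # -> ('account.' + EVENTS_MODULE_NAME, 'UserCreated')

    parse_event_name('bad-name')     # raises InvalidEventNameError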
<SYSTEM_TASK:> Actually import the event represented by name <END_TASK> <USER_TASK:> Description: def find_event(name): """Actually import the event represented by name Raises the `EventNotFoundError` if it's not possible to find the event class refered by `name`. """
try: module, klass = parse_event_name(name) return getattr(import_module(module), klass) except (ImportError, AttributeError): raise EventNotFoundError( ('Event "{}" not found. ' 'Make sure you have a class called "{}" inside the "{}" ' 'module.'.format(name, klass, module)))
<SYSTEM_TASK:> Remove handlers of a given `event`. If no event is informed, wipe <END_TASK> <USER_TASK:> Description: def cleanup_handlers(event=None): """Remove handlers of a given `event`. If no event is informed, wipe out all events registered. Be careful!! This function is intended to help when writing tests and for debugging purposes. If you call it, all handlers associated to an event (or to all of them) will be disassociated. Which means that you'll have to reload all modules that teclare handlers. I'm sure you don't want it. """
    if event:
        if event in HANDLER_REGISTRY:
            del HANDLER_REGISTRY[event]
        if event in EXTERNAL_HANDLER_REGISTRY:
            del EXTERNAL_HANDLER_REGISTRY[event]
    else:
        HANDLER_REGISTRY.clear()
        EXTERNAL_HANDLER_REGISTRY.clear()