<SYSTEM_TASK:> Main functionality of _match, but works only on rows. <END_TASK> <USER_TASK:> Description: def __match_rows(self, optimized_lines=None, transpose=False): """Main functionality of _match, but works only on rows. Full matches are found by running this once with original board and once with a transposed board. Arguments: optimized_lines is an optional argument that identifies the lines that need to be matched. transpose indicates whether the match is looking at rows or columns """
MIN_LENGTH = 3
a = self._array
if transpose:
    a = a.T
rows = optimized_lines or range(8)
# check for matches in each row separately
for row in rows:
    NUM_COLUMNS = 8
    match_length = 1
    start_position = 0  # next tile pointer
    # set next start position as long as a match is still possible
    while start_position + MIN_LENGTH <= NUM_COLUMNS:
        group_type = a[row, start_position]
        # try to increase match length as long as there is room
        while start_position + match_length + 1 <= NUM_COLUMNS:
            next_tile = a[row, start_position + match_length]
            # if no match, stop looking for further matches
            if not group_type.matches(next_tile):
                break
            # if group_type is wildcard, try to find a real type
            if group_type.is_wildcard():
                group_type = next_tile
            match_length += 1
        # produce a matched position group if the current match qualifies
        if match_length >= MIN_LENGTH and not group_type.is_wildcard():
            row_ = row
            target_positions = [(row_, col_) for col_
                                in range(start_position,
                                         start_position + match_length)]
            if transpose:
                target_positions = [(col_, row_)
                                    for row_, col_ in target_positions]
            yield target_positions
        # setup for continuing to look for matches after the current one
        start_position += match_length
        match_length = 1
<SYSTEM_TASK:> Destroy indicated position groups, handle any chain destructions, <END_TASK> <USER_TASK:> Description: def _destroy(self, target_position_groups): """Destroy indicated position groups, handle any chain destructions, and return all destroyed groups."""
target_position_groups = list(target_position_groups)  # work on a copy
destroyed_tile_groups = list()
blank = Tile.singleton('.')
a = self._array
while target_position_groups:  # continue as long as more targets exist
    # delay actual clearing of destroyed tiles until all claiming
    # groups have been stored (e.g. overlapping matches, bombs)
    clear_after_storing = list()
    new_target_position_groups = list()
    for target_position_group in target_position_groups:
        destroyed_tile_group = list()
        for target_position in target_position_group:
            target_tile = a[target_position]
            # no handling for blanks that appear in destruction
            if target_tile.is_blank():
                continue
            destroyed_tile_group.append(target_tile)
            clear_after_storing.append(target_position)
            # skull bombs require further destructions
            if target_tile.is_skullbomb():
                new_positions = self.__skullbomb_radius(target_position)
                # convert individual positions to position groups
                new_position_groups = [(new_position,) for new_position
                                       in new_positions]
                new_target_position_groups.extend(new_position_groups)
        if destroyed_tile_group:
            destroyed_tile_groups.append(destroyed_tile_group)
    # Finally clear positions after all records have been made
    for position in clear_after_storing:
        a[position] = blank
    # Replace the completed target position groups with any new ones
    target_position_groups = new_target_position_groups
return destroyed_tile_groups
<SYSTEM_TASK:> Generate all valid positions in the square around position. <END_TASK> <USER_TASK:> Description: def __skullbomb_radius(self, position): """Generate all valid positions in the square around position."""
# get the boundaries of the explosion
sb_row, sb_col = position
left = max(sb_row - 1, 0)    # standard radius or 0 if out of bounds
right = min(sb_row + 1, 7)   # standard radius or 7 if out of bounds
top = max(sb_col - 1, 0)
bottom = min(sb_col + 1, 7)
for explosion_row in xrange(left, right + 1):
    for explosion_col in xrange(top, bottom + 1):
        yield (explosion_row, explosion_col)
<SYSTEM_TASK:> Cause tiles to fall down to fill blanks below them. <END_TASK> <USER_TASK:> Description: def _fall(self): """Cause tiles to fall down to fill blanks below them."""
a = self._array
for column in [a[:, c] for c in range(a.shape[1])]:
    # find blanks and fill them with tiles above them
    target_p = column.shape[0] - 1  # start at the bottom
    fall_distance = 1  # increases every time a new gap is found
    while target_p - fall_distance >= 0:  # step up the target position
        if column[target_p].is_blank():
            blank = column[target_p]  # move the blank
            # find the next nonblank position
            while target_p - fall_distance >= 0:
                next_p = target_p - fall_distance
                if column[next_p].is_blank():
                    fall_distance += 1
                else:
                    break  # stop expanding blank space when nonblank
            if target_p - fall_distance >= 0:
                # move the nonblank position to the target if gap exists
                source_position = target_p - fall_distance
                column[target_p] = column[source_position]
                column[source_position] = blank
        # in any case, move on to the next target position
        target_p -= 1
<SYSTEM_TASK:> Fill the board with random tiles based on the Tile class. <END_TASK> <USER_TASK:> Description: def _random_fill(self): """Fill the board with random tiles based on the Tile class."""
a = self._array
for p, tile in self.positions_with_tile():
    if tile.is_blank():
        a[p] = Tile.random_tile()
<SYSTEM_TASK:> Generate a sequence of at least all valid swaps for this board. <END_TASK> <USER_TASK:> Description: def potential_swaps(self): """Generate a sequence of at least all valid swaps for this board. The built-in optimizer filters out many meaningless swaps, but not all. """
a = self._array
rows, cols = a.shape
for this_position, tile in self.positions_with_tile():
    # produce horizontal swap for this position
    r, c = this_position
    if c < cols - 1:
        other_position = (r, c + 1)
        if self._swap_optimizer_allows(this_position, other_position):
            yield (this_position, other_position)
    # produce vertical swap for this position. not DRY but maybe ok
    if r < rows - 1:
        other_position = (r + 1, c)
        if self._swap_optimizer_allows(this_position, other_position):
            yield (this_position, other_position)
<SYSTEM_TASK:> Identify easily discarded meaningless swaps. <END_TASK> <USER_TASK:> Description: def _swap_optimizer_allows(self, p1, p2): """Identify easily discarded meaningless swaps. This is motivated by the cost of millions of swaps being simulated. """
# setup local shortcuts
a = self._array
tile1 = a[p1]
tile2 = a[p2]
# 1) disallow same tiles
if tile1 == tile2:
    return False
# 2) disallow matches unless a wildcard is involved
if tile1.matches(tile2) and not any(t.is_wildcard()
                                    for t in (tile1, tile2)):
    return False
# 3) disallow when both tiles (post-swap) are surrounded by non-matches
center_other_pairs = ((p1, p2), (p2, p1))

class MatchedTiles(Exception):
    pass

try:
    for center_p, other_p in center_other_pairs:
        up_down_left_right = ((center_p[0] - 1, center_p[1]),
                              (center_p[0] + 1, center_p[1]),
                              (center_p[0], center_p[1] - 1),
                              (center_p[0], center_p[1] + 1))
        post_swap_center_tile = a[other_p]
        for surrounding_p in up_down_left_right:
            # ignore out of bounds positions
            # and ignore the inner swap which is handled elsewhere
            if any((not (0 <= surrounding_p[0] <= 7),  # out of bounds
                    not (0 <= surrounding_p[1] <= 7),  # out of bounds
                    surrounding_p == other_p)):        # inner swap
                continue
            surrounding_tile = a[surrounding_p]
            if post_swap_center_tile.matches(surrounding_tile):
                raise MatchedTiles()
except MatchedTiles:
    pass  # if any match found, stop checking and pass this filter
else:
    return False  # if no match is found, then this can be filtered
return True
<SYSTEM_TASK:> Return a copy of this actor with the same attribute values. <END_TASK> <USER_TASK:> Description: def copy(self): """Return a copy of this actor with the same attribute values."""
health = self.health, self.health_max
r = self.r, self.r_max
g = self.g, self.g_max
b = self.b, self.b_max
y = self.y, self.y_max
x = self.x, self.x_max
m = self.m, self.m_max
h = self.h, self.h_max
c = self.c, self.c_max
return self.__class__(self.name, health, r, g, b, y, x, m, h, c)
<SYSTEM_TASK:> Increase mana, xp, money, anvils and scrolls based on tile groups. <END_TASK> <USER_TASK:> Description: def apply_tile_groups(self, tile_groups): """Increase mana, xp, money, anvils and scrolls based on tile groups."""
self_increase_types = ('r', 'g', 'b', 'y', 'x', 'm', 'h', 'c')
attack_types = ('s', '*')
total_attack = 0
for tile_group in tile_groups:
    group_type = None
    type_count = 0
    type_multiplier = 1
    for tile in tile_group:
        # try to get the group type
        if not group_type:
            # try to set the group type to a non-wildcard type
            if tile._type in self_increase_types:
                group_type = tile._type
            elif tile._type in attack_types:
                group_type = 's'  # always use base skull
        # handle special case of wildcard
        if tile.is_wildcard():
            type_multiplier *= int(tile._type)
            continue  # done with this tile
        # handle special case of skullbomb / skull
        elif tile.is_skullbomb():
            total_attack += 5
            continue
        elif tile.is_skull():
            total_attack += 1
            continue
        # handle standard case of normal tiles
        else:
            type_count += 1
    if group_type is None:
        continue  # ignore this group. could be all wildcards or empty
    # adjust self value
    if type_count:
        new_value = type_count * type_multiplier
        original = getattr(self, group_type)
        setattr(self, group_type, original + new_value)
# return any attack value
return total_attack
<SYSTEM_TASK:> Copy a tree of files over. <END_TASK> <USER_TASK:> Description: def copytree(src, dst, overwrite=False, ignore_list=None, debug=False): """ Copy a tree of files over. Ignores a file if it already exists at the destination. """
if ignore_list is None:
    ignore_list = []
if debug:
    print('copytree {} to {}'.format(src, dst))
for child in Path(src).iterdir():
    if debug:
        print("  on file {}".format(child))
    if child.name not in ignore_list:
        if child.is_dir():
            new_dir = Path(dst) / child.relative_to(src)
            new_dir.mkdir(exist_ok=True)
            # call recursively
            copytree(child, new_dir, overwrite, ignore_list, debug)
        elif child.is_file():
            new_file = Path(dst) / child.relative_to(src)
            if debug:
                print("    to file {}".format(new_file))
            if new_file.exists():
                # only replace an existing file when overwrite is set
                if overwrite:
                    new_file.unlink()
                    shutil.copy2(child, new_file)
            else:
                shutil.copy2(child, new_file)
    else:
        if debug:
            print("  Skipping copy of {}".format(child))
<SYSTEM_TASK:> Acquire a read lock for the current thread, waiting at most <END_TASK> <USER_TASK:> Description: def acquire_read(self, timeout=None): """ Acquire a read lock for the current thread, waiting at most timeout seconds or doing a non-blocking check in case timeout is <= 0. In case timeout is None, the call to acquire_read blocks until the lock request can be serviced. If the lock has been successfully acquired, this function returns True, on a timeout it returns None. """
if timeout is not None:
    endtime = time.time() + timeout
me = threading.currentThread()
self.__condition.acquire()
try:
    if self.__writer is me:
        self.__writer_lock_count += 1
        return True
    while True:
        if self.__writer is None:
            if self.__pending_writers:
                if me in self.__readers:
                    # Grant the lock anyway if we already
                    # hold one, because this would otherwise
                    # cause a deadlock between the pending
                    # writers and ourself.
                    self.__readers[me] += 1
                    return True
                # else: does nothing, will wait below
                # writers are given priority
            else:
                self.__readers[me] = self.__readers.get(me, 0) + 1
                return True
        if timeout is not None:
            remaining = endtime - time.time()
            if remaining <= 0:
                return None
            self.__condition.wait(remaining)
        else:
            self.__condition.wait()
finally:
    self.__condition.release()
<SYSTEM_TASK:> Acquire a write lock for the current thread, waiting at most <END_TASK> <USER_TASK:> Description: def acquire_write(self, timeout=None): """ Acquire a write lock for the current thread, waiting at most timeout seconds or doing a non-blocking check in case timeout is <= 0. In case timeout is None, the call to acquire_write blocks until the lock request can be serviced. If the lock has been successfully acquired, this function returns True. On a timeout it returns None. In case a trivial deadlock condition is detected (the current thread already hold a reader lock) it returns False. """
if timeout is not None:
    endtime = time.time() + timeout
me = threading.currentThread()
self.__condition.acquire()
try:
    if self.__writer is me:
        self.__writer_lock_count += 1
        return True
    if me in self.__readers:
        # trivial deadlock detected (we do not handle promotion)
        return False
    self.__pending_writers.append(me)
    while True:
        if (not self.__readers and self.__writer is None
                and self.__pending_writers[0] is me):
            self.__writer = me
            self.__writer_lock_count = 1
            self.__pending_writers = self.__pending_writers[1:]
            return True
        if timeout is not None:
            remaining = endtime - time.time()
            if remaining <= 0:
                self.__pending_writers.remove(me)
                return None
            self.__condition.wait(remaining)
        else:
            self.__condition.wait()
finally:
    self.__condition.release()
<SYSTEM_TASK:> Release the currently held lock. <END_TASK> <USER_TASK:> Description: def release(self): """ Release the currently held lock. In case the current thread holds no lock, a thread.error is thrown. """
me = threading.currentThread()
self.__condition.acquire()
try:
    if self.__writer is me:
        self.__writer_lock_count -= 1
        if self.__writer_lock_count == 0:
            self.__writer = None
            self.__condition.notifyAll()
    elif me in self.__readers:
        self.__readers[me] -= 1
        if self.__readers[me] == 0:
            del self.__readers[me]
            if not self.__readers:
                self.__condition.notifyAll()
    else:
        raise thread.error("release unlocked lock")
finally:
    self.__condition.release()
<SYSTEM_TASK:> elt must be hashable. <END_TASK> <USER_TASK:> Description: def try_acquire(self, elt): """ elt must be hashable. If not yet locked, elt is captured for the current thread and True is returned. If already locked by the current thread its recursive counter is incremented and True is returned. If already locked by another thread False is returned. """
me = threading.currentThread()
self.lock.acquire()
try:
    if elt not in self.locked:
        self.locked[elt] = [me, 1]
        return True
    elif self.locked[elt][0] == me:
        self.locked[elt][1] += 1
        return True
finally:
    self.lock.release()
return False
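A minimal usage sketch for the read/write lock methods above. The class name ReadWriteLock and the shared-state bodies are illustrative assumptions (the source does not show the class name); the return values follow the documented contract (True on success, None on timeout, False on a trivial read-to-write deadlock):

lock = ReadWriteLock()  # assumed name of the class defining the methods above

def reader():
    # acquire_read returns True on success, None on timeout
    if lock.acquire_read(timeout=1.0):
        try:
            pass  # read shared state here
        finally:
            lock.release()

def writer():
    # acquire_write returns True, None on timeout, or False when the
    # calling thread already holds a read lock (trivial deadlock)
    if lock.acquire_write(timeout=1.0) is True:
        try:
            pass  # mutate shared state here
        finally:
            lock.release()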
<SYSTEM_TASK:> Get a string value from the component. <END_TASK> <USER_TASK:> Description: def get(self, key, raw=False, fallback=None): """ Get a string value from the component. Arguments: key - the key to retrieve raw - Control whether the value is interpolated or returned raw. By default, values are interpolated. fallback - The return value if key isn't in the component. """
return self._component.get(key, raw=raw, fallback=fallback)
<SYSTEM_TASK:> Retrieve a value in list form. <END_TASK> <USER_TASK:> Description: def get_list(self, key, fallback=None, split=","): """ Retrieve a value in list form. The interpolated value will be split on some key (by default, ',') and the resulting list will be returned. Arguments: key - the key to return fallback - The result to return if key isn't in the component. By default, this will be an empty list. split - The key to split the value on. By default, a comma (,). """
fallback = fallback or []
raw = self.get(key, None)
if raw:
    return [value.strip() for value in raw.split(split)]
return fallback
<SYSTEM_TASK:> Set a value in the component. <END_TASK> <USER_TASK:> Description: def set(self, key, value): """ Set a value in the component. Arguments: key - the key to set value - the new value """
if self._component.get(key, raw=True) != value:
    self._component[key] = value
    self._main_config.dirty = True
<SYSTEM_TASK:> Discovers if a request is from a known spam bot and denies access. <END_TASK> <USER_TASK:> Description: def process_request(self, request): """Discovers if a request is from a known spam bot and denies access."""
if COOKIE_KEY in request.COOKIES and \
        request.COOKIES[COOKIE_KEY] == COOKIE_SPAM:
    # Is a known spammer.
    response = HttpResponse("")
    # We do not reveal why it has been forbidden:
    response.status_code = 404
    if DJANGOSPAM_LOG:
        logger.log("SPAM REQUEST", request.method, request.path_info,
                   request.META.get("HTTP_USER_AGENT", "undefined"))
    return response
if DJANGOSPAM_LOG:
    logger.log("PASS REQUEST", request.method, request.path_info,
               request.META.get("HTTP_USER_AGENT", "undefined"))
return None
<SYSTEM_TASK:> Sets "Ok" cookie on unknown users. <END_TASK> <USER_TASK:> Description: def process_response(self, request, response): """Sets "Ok" cookie on unknown users."""
if COOKIE_KEY not in request.COOKIES:
    # Unknown user, set cookie and go on...
    response.set_cookie(COOKIE_KEY, COOKIE_PASS, httponly=True,
                        expires=datetime.now() + timedelta(days=30))
    # Only logged if we have to set the PASS cookie
    if DJANGOSPAM_LOG:
        logger.log("PASS RESPONSE", request.method, request.path_info,
                   request.META.get("HTTP_USER_AGENT", "undefined"))
return response
<SYSTEM_TASK:> Parse the version information from the init file <END_TASK> <USER_TASK:> Description: def get_version(): """ Parse the version information from the init file """
version_file = os.path.join("paps_settings", "__init__.py")
initfile_lines = open(version_file, 'rt').readlines()
version_reg = r"^__version__ = ['\"]([^'\"]*)['\"]"
for line in initfile_lines:
    mo = re.search(version_reg, line, re.M)
    if mo:
        return mo.group(1)
raise RuntimeError(
    "Unable to find version string in {}".format(version_file)
)
<SYSTEM_TASK:> Convert the first letter of each word to capital letter without <END_TASK> <USER_TASK:> Description: def inicap_string(string, cleanse=False): """ Convert the first letter of each word to capital letter without touching the rest of the word. @param string: a string. @param cleanse: ``True`` to remove any separator character from the string, such as comma; ``False`` to keep any character but space. @return: a string for which the first letter of each word has been capitalized without modifying the case of the rest of the word. """
return string and ' '.join(
    word[0].upper() + word[1:]
    for word in (string.split() if cleanse else string.split(' '))
)
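A few illustrative calls for inicap_string; the expected results follow directly from the implementation above (falsy inputs short-circuit through the leading `string and`):

print(inicap_string("hello world"))                    # 'Hello World'
print(inicap_string("dr. von neumann", cleanse=True))  # 'Dr. Von Neumann'
print(inicap_string(None))                             # None (falsy input returned as-is)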
<SYSTEM_TASK:> Check whether the specified string corresponds to an email address. <END_TASK> <USER_TASK:> Description: def is_email_address(string): """ Check whether the specified string corresponds to an email address. @param string: a string that is expected to be an email address. @return: ``True`` if the string corresponds to an email address, ``False`` otherwise. """
return string and isinstance(string, base_string) \
    and REGEX_PATTERN_EMAIL_ADDRESS.match(string.strip().lower())
<SYSTEM_TASK:> Remove any punctuation character from the specified list of keywords, <END_TASK> <USER_TASK:> Description: def string_to_keywords(string, keyword_minimal_length=1): """ Remove any punctuation character from the specified string, remove any double or more space character and represent Unicode characters in ASCII. @param string: the string from which to strip out any punctuation characters. @param keyword_minimal_length: minimal number of characters of the keywords to be returned. @return: the set of keywords cleansed from any special Unicode accentuated character, punctuation character, and double space character. """
# Convert the string to ASCII lower characters.
ascii_string = unidecode.unidecode(string).lower()
# Replace any punctuation character with space.
punctuationless_string = re.sub(
    r"""[.,\\/#!$%\^&*;:{}=\-_`~()<>"']""", ' ', ascii_string)
# Remove any double space character.
cleansed_string = re.sub(r'\s{2,}', ' ', punctuationless_string)
# Decompose the string into distinct keywords.
keywords = set(cleansed_string.split(' '))
# Filter out keywords not longer than keyword_minimal_length.
return [
    keyword for keyword in keywords
    if len(keyword) > keyword_minimal_length
]
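For illustration, a sample call with a hypothetical input (the output order may vary because a set is used internally):

keywords = string_to_keywords("Les Misérables, Volume #1")
# unidecode maps 'é' to 'e', punctuation becomes spaces, and tokens no
# longer than keyword_minimal_length (here the default 1) are dropped:
# e.g. ['les', 'miserables', 'volume']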
<SYSTEM_TASK:> Print dependency information, grouping components based on their position <END_TASK> <USER_TASK:> Description: def _print_layers(targets, components, tasks): """ Print dependency information, grouping components based on their position in the dependency graph. Components with no dependencies will be in layer 0, components that only depend on layer 0 will be in layer 1, and so on. If there's a circular dependency, those nodes and their dependencies will be colored red. Arguments targets - the targets explicitly requested components - full configuration for all components in a project """
layer = 0
expected_count = len(tasks)
counts = {}

def _add_layer(resolved, dep_fn):
    nonlocal layer
    nonlocal counts
    nonlocal expected_count
    really_resolved = []
    for resolved_task in resolved:
        resolved_component_tasks = counts.get(resolved_task[0], [])
        resolved_component_tasks.append(resolved_task)
        if len(resolved_component_tasks) == expected_count:
            really_resolved.extend(resolved_component_tasks)
            del counts[resolved_task[0]]
        else:
            counts[resolved_task[0]] = resolved_component_tasks
    if really_resolved:
        indentation = " " * 4
        print("{}subgraph cluster_{} {{".format(indentation, layer))
        print('{}label="Layer {}"'.format(indentation * 2, layer))
        dep_fn(indentation * 2, really_resolved)
        print("{}}}".format(indentation))
        layer += 1

_do_dot(targets, components, tasks, _add_layer)
<SYSTEM_TASK:> Print dependency information using a dot directed graph. The graph will <END_TASK> <USER_TASK:> Description: def _print_graph(targets, components, tasks): """ Print dependency information using a dot directed graph. The graph will contain explicitly requested targets plus any dependencies. If there's a circular dependency, those nodes and their dependencies will be colored red. Arguments targets - the targets explicitly requested components - full configuration for all components in a project """
indentation = " " * 4 _do_dot( targets, components, tasks, lambda resolved, dep_fn: dep_fn(indentation, resolved), )
<SYSTEM_TASK:> Deprecated function; use print_graph. <END_TASK> <USER_TASK:> Description: def _print_dot(targets, components, tasks): """ Deprecated function; use print_graph. Arguments targets - the targets explicitly requested components - full configuration for all components in a project """
print("Warning: dot option is deprecated. Use graph instead.", file=sys.stderr) _print_graph(targets, components, tasks)
<SYSTEM_TASK:> List of currently selected items <END_TASK> <USER_TASK:> Description: def _get_selected_items(self): """List of currently selected items"""
selection = self.get_selection()
if selection.get_mode() != gtk.SELECTION_MULTIPLE:
    raise AttributeError('selected_items only valid for '
                         'select_multiple')
model, selected_paths = selection.get_selected_rows()
result = []
for path in selected_paths:
    result.append(model[path][0])
return result
<SYSTEM_TASK:> List of currently selected ids <END_TASK> <USER_TASK:> Description: def _get_selected_ids(self): """List of currently selected ids"""
selection = self.get_selection()
if selection.get_mode() != gtk.SELECTION_MULTIPLE:
    raise AttributeError('selected_ids only valid for select_multiple')
model, selected_paths = selection.get_selected_rows()
if selected_paths:
    return zip(*selected_paths)[0]
else:
    return ()
<SYSTEM_TASK:> Manually update an item's display in the list <END_TASK> <USER_TASK:> Description: def update(self, item): """Manually update an item's display in the list :param item: The item to be updated. """
self.model.set(self._iter_for(item), 0, item)
<SYSTEM_TASK:> Move an item down in the list. <END_TASK> <USER_TASK:> Description: def move_item_down(self, item): """Move an item down in the list. Essentially swap it with the item below it. :param item: The item to be moved. """
next_iter = self._next_iter_for(item)
if next_iter is not None:
    self.model.swap(self._iter_for(item), next_iter)
<SYSTEM_TASK:> Move an item up in the list. <END_TASK> <USER_TASK:> Description: def move_item_up(self, item): """Move an item up in the list. Essentially swap it with the item above it. :param item: The item to be moved. """
prev_iter = self._prev_iter_for(item)
if prev_iter is not None:
    self.model.swap(prev_iter, self._iter_for(item))
<SYSTEM_TASK:> The item before an item <END_TASK> <USER_TASK:> Description: def item_before(self, item): """The item before an item :param item: The item to get the previous item relative to """
prev_iter = self._prev_iter_for(item)
if prev_iter is not None:
    return self._object_at_iter(prev_iter)
<SYSTEM_TASK:> Set the function to decide visibility of an item <END_TASK> <USER_TASK:> Description: def set_visible_func(self, visible_func): """Set the function to decide visibility of an item :param visible_func: A callable that returns a boolean result to decide if an item should be visible, for example:: def is_visible(item): return True """
self.model_filter.set_visible_func(
    self._internal_visible_func,
    visible_func,
)
self._visible_func = visible_func
self.model_filter.refilter()
<SYSTEM_TASK:> Sort the view by an attribute or key <END_TASK> <USER_TASK:> Description: def sort_by(self, attr_or_key, direction='asc'): """Sort the view by an attribute or key :param attr_or_key: The attribute or key to sort by :param direction: Either `asc` or `desc` indicating the direction of sorting """
# work out the direction
if direction in ('+', 'asc', gtk.SORT_ASCENDING):
    direction = gtk.SORT_ASCENDING
elif direction in ('-', 'desc', gtk.SORT_DESCENDING):
    direction = gtk.SORT_DESCENDING
else:
    raise AttributeError('unrecognised direction')
if callable(attr_or_key):
    # it's a key
    sort_func = self._key_sort_func
else:
    # it's an attribute
    sort_func = self._attr_sort_func
self.model.set_default_sort_func(sort_func, attr_or_key)
self.model.set_sort_column_id(-1, direction)
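Two hypothetical calls showing both sorting modes; `view` stands for an instance of this list widget and `item.score` is an assumed attribute, not something shown in the source:

view.sort_by('name')                           # sort by attribute, ascending
view.sort_by(lambda item: item.score, 'desc')  # sort by key function, descending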
<SYSTEM_TASK:> Insert an item at the specified position in the list. <END_TASK> <USER_TASK:> Description: def insert(self, position, item, select=False): """Insert an item at the specified position in the list. :param position: The position to insert the item at :param item: The item to be added :param select: Whether the item should be selected after adding """
if item in self:
    raise ValueError("item %s already in list" % item)
modeliter = self.model.insert(position, (item,))
self._id_to_iter[id(item)] = modeliter
if select:
    self.selected_item = item
self.emit('item-inserted', item, position)
<SYSTEM_TASK:> Add a sequence of items to the end of the list <END_TASK> <USER_TASK:> Description: def extend(self, iter, parent=None): """Add a sequence of items to the end of the list :param iter: The iterable of items to add. :param parent: The node to add the items as a child of, or None for top-level nodes. """
for item in iter:
    self.append(item, parent)
<SYSTEM_TASK:> Display a node as expanded <END_TASK> <USER_TASK:> Description: def expand_item(self, item, open_all=True): """Display a node as expanded :param item: The item to show expanded :param open_all: Whether all child nodes should be recursively expanded. """
self.expand_row(self._view_path_for(item), open_all)
<SYSTEM_TASK:> Serialize an object to a JSON formatted string. <END_TASK> <USER_TASK:> Description: def jsonify(obj, trimmable=False): """ Serialize an object to a JSON formatted string. @param obj: an instance to convert to a JSON formatted string. @param trimmable: indicate whether null attributes of this object need to be stripped out the JSON formatted string. @return: a JSON formatted string. """
# def jsonify_ex(obj, trimmable=False):
#     if obj is None:
#         return None
#     elif isinstance(obj, (basestring, bool, int, long, float, complex)):
#         return obj
#     elif isinstance(obj, (list, set, tuple)):
#         # MUST NOT remove nil value
#         return [jsonify_ex(item, trimmable=trimmable) for item in obj]
#     elif isinstance(obj, dict):
#         return dict([(jsonify_ex(key, trimmable=trimmable),
#                       jsonify_ex(value, trimmable=trimmable))
#                      for (key, value) in obj.iteritems()
#                      if not trimmable or value is not None])
#     elif isinstance(obj, (uuid.UUID, EnumValue, ISO8601DateTime, Locale)):
#         return str(obj)
#     elif isinstance(obj, datetime.datetime):
#         return str(ISO8601DateTime.from_datetime(obj)) if obj.tzinfo else str(obj)
#     elif '__dict__' in dir(obj):
#         return dict([(key, jsonify_ex(value, trimmable=trimmable))
#                      for (key, value) in obj.__dict__.iteritems()
#                      if not trimmable or value is not None])
#     else:
#         return str(obj)
#
# return json.dumps(jsonify_ex(obj, trimmable=trimmable))
return json.dumps(stringify(obj, trimmable=trimmable))
<SYSTEM_TASK:> Convert an object to a stringified version of this object where the <END_TASK> <USER_TASK:> Description: def stringify(obj, trimmable=False): """ Convert an object to a stringified version of this object where the initial object's attribute values have been stringified when these values are not native Python types. This function differs from the standard Python ``str`` as the latter returns a string version of the object, while this function returns an object whose attributes have native Python types. @param obj: an instance to return the stringified version of. @param trimmable: indicate whether null attributes of this object need to be stripped out. @return: an object whose attribute values are either native Python types or strings. """
if obj is None:
    return None
elif isinstance(obj, (basestring, bool, int, long, float, complex)):
    return obj
elif isinstance(obj, (list, set, tuple)):
    return [stringify(item, trimmable=trimmable) for item in obj]
elif isinstance(obj, dict):
    return dict([(unicode(key), stringify(value, trimmable=trimmable))
                 for (key, value) in obj.iteritems()
                 if not trimmable or value is not None])
elif isinstance(obj, (uuid.UUID, EnumValue, ISO8601DateTime, Locale)):
    return unicode(obj)
elif isinstance(obj, datetime.datetime):
    return str(ISO8601DateTime.from_datetime(obj)) if obj.tzinfo else str(obj)
elif hasattr(obj, '__dict__'):
    return dict([(key, stringify(value, trimmable=trimmable))
                 for (key, value) in obj.__dict__.iteritems()
                 if not trimmable or value is not None])
else:
    return unicode(obj)
<SYSTEM_TASK:> Return a shallow copy of the given object, including only the <END_TASK> <USER_TASK:> Description: def shallow_copy(obj, attribute_names, ignore_missing_attributes=True): """ Return a shallow copy of the given object, including only the specified attributes of this object. @param obj: an object to copy. @param attribute_names: a list of names of the attributes to copy. @param ignore_missing_attributes: ``True`` indicates that the function can ignore attributes that have been specified but that are not defined in the given object; ``False`` indicates that the function MUST raise an ``AttributeError`` exception if some specified attributes are not defined in the given object. @return: a shallow copy of the given object with the specified attributes only. @raise AttributeError: if the argument ``ignore_missing_attributes`` equals ``False`` and if some specified attributes are not defined in the given object. """
shallow_object = copy.copy(obj)
shallow_object.__dict__ = {}
for attribute_name in attribute_names:
    try:
        setattr(shallow_object, attribute_name,
                getattr(obj, attribute_name))
    except AttributeError, error:  # getattr raises AttributeError
        if not ignore_missing_attributes:
            raise error
return shallow_object
<SYSTEM_TASK:> Build an object from a JSON dictionary. <END_TASK> <USER_TASK:> Description: def from_json(payload): """ Build an object from a JSON dictionary. @param payload: a JSON dictionary which key/value pairs represent the members of the Python object to build, or ``None``. @return: an instance ``Object`` with members built from the key/value pairs of the given JSON dictionary, or ``None`` if the payload is ``None`` or the given JSON dictionary is empty. """
if payload is None:
    return None
if isinstance(payload, dict):
    return Object(**dict([
        (k, v if not isinstance(v, (dict, list)) else Object.from_json(v))
        for (k, v) in payload.iteritems()]))
elif isinstance(payload, list):
    return payload and [Object.from_json(v)
                        if isinstance(v, (dict, list)) else v
                        for v in payload]
else:
    raise ValueError('The payload MUST be a dictionary or a list')
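An illustrative round-trip with a hypothetical payload; nested dictionaries become Object instances while scalar list members are kept as-is:

payload = {"name": "alice", "address": {"city": "Paris"}, "tags": ["a", "b"]}
obj = Object.from_json(payload)
# obj.name == 'alice'
# obj.address.city == 'Paris'  (nested dict converted recursively)
# obj.tags == ['a', 'b']       (list of scalars left untouched)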
<SYSTEM_TASK:> Tests comment post requests for the djangospam cookie. <END_TASK> <USER_TASK:> Description: def allow(self, comment, content_object, request): """Tests comment post requests for the djangospam cookie."""
# Tests for cookie:
if settings.COOKIE_KEY not in request.COOKIES \
        and (settings.DISCARD_SPAM or settings.DISCARD_NO_COOKIE):
    return False
elif settings.COOKIE_KEY not in request.COOKIES:
    comment.is_removed = True
    comment.is_public = False
    return True
return True
<SYSTEM_TASK:> Base validation method. Check if type is valid, or try brute casting. <END_TASK> <USER_TASK:> Description: def validate(self, value): """Base validation method. Check if type is valid, or try brute casting. Args: value (object): A value for validation. Returns: Base_type instance. Raises: SchemaError, if validation or type casting fails. """
cast_callback = self.cast_callback if self.cast_callback else self.cast_type
try:
    return value if isinstance(value, self.cast_type) else cast_callback(value)
except Exception:
    raise NodeTypeError(
        'Invalid value `{}` for {}.'.format(value, self.cast_type))
<SYSTEM_TASK:> Determine if a directory is acceptable for building. <END_TASK> <USER_TASK:> Description: def _is_cache_dir_appropriate(cache_dir, cache_file): """ Determine if a directory is acceptable for building. A directory is suitable if any of the following are true: - it doesn't exist - it is empty - it contains an existing build cache """
if os.path.exists(cache_dir):
    files = os.listdir(cache_dir)
    if cache_file in files:
        return True
    return not bool(files)
return True
<SYSTEM_TASK:> Read a build configuration and create it, storing the result in a build <END_TASK> <USER_TASK:> Description: def process_config(raw_path, cache_dir, cache_file, **kwargs): """ Read a build configuration and create it, storing the result in a build cache. Arguments raw_path -- path to a build configuration cache_dir -- the directory where cache should be written cache_file -- The filename to write the cache. This will live inside cache_dir. **kwargs -- additional arguments used by some modifiers """
config = _create_cache(raw_path, cache_dir, cache_file)
for modifier in _CONFIG_MODIFIERS:
    modifier(config, **kwargs)
# pylint: disable=protected-access
cache = devpipeline_configure.cache._CachedConfig(
    config, os.path.join(cache_dir, cache_file)
)
_handle_value_modifiers(cache)
_add_package_options(cache)
_write_config(cache, cache_dir)
return cache
<SYSTEM_TASK:> Returns the urls for the model. <END_TASK> <USER_TASK:> Description: def get_urls(self): """ Returns the urls for the model. """
urls = super(IPAdmin, self).get_urls()
my_urls = patterns(
    '',
    url(r'^batch_process_ips/$',
        self.admin_site.admin_view(self.batch_process_ips_view),
        name='batch_process_ips_view')
)
return my_urls + urls
<SYSTEM_TASK:> Determine amplitude and offset-corrected phase from a field <END_TASK> <USER_TASK:> Description: def field2ap_corr(field): """Determine amplitude and offset-corrected phase from a field The phase jumps sometimes appear after phase unwrapping. Parameters ---------- field: 2d complex np.ndarray Complex input field Returns ------- amp: 2d real np.ndarray Amplitude data pha: 2d real np.ndarray Phase data, corrected for 2PI offsets """
phase = unwrap.unwrap_phase(np.angle(field), seed=47)
samples = []
samples.append(phase[:, :3].flatten())
samples.append(phase[:, -3:].flatten())
samples.append(phase[:3, 3:-3].flatten())
samples.append(phase[-3:, 3:-3].flatten())
pha_offset = np.median(np.hstack(samples))
num_2pi = np.round(pha_offset / (2 * np.pi))
phase -= num_2pi * 2 * np.pi
ampli = np.abs(field)
return ampli, phase
<SYSTEM_TASK:> Mie-simulated field behind a dielectric sphere <END_TASK> <USER_TASK:> Description: def mie(radius=5e-6, sphere_index=1.339, medium_index=1.333, wavelength=550e-9, pixel_size=1e-7, grid_size=(80, 80), center=(39.5, 39.5), focus=0, arp=True): """Mie-simulated field behind a dielectric sphere Parameters ---------- radius: float Radius of the sphere [m] sphere_index: float Refractive index of the sphere medium_index: float Refractive index of the surrounding medium wavelength: float Vacuum wavelength of the imaging light [m] pixel_size: float Pixel size [m] grid_size: tuple of floats Resulting image size in x and y [px] center: tuple of floats Center position in image coordinates [px] focus: float .. versionadded:: 0.5.0 Axial focus position [m] measured from the center of the sphere in the direction of light propagation. arp: bool Use arbitrary precision (ARPREC) in BHFIELD computations Returns ------- qpi: qpimage.QPImage Quantitative phase data set """
# simulation parameters
radius_um = radius * 1e6  # radius of sphere in um
propd_um = radius_um  # simulate propagation through full sphere
propd_lamd = radius / wavelength  # radius in wavelengths
wave_nm = wavelength * 1e9
# Qpsphere models define the position of the sphere with an index in
# the array (because it is easier to work with). The pixel
# indices run from (0, 0) to grid_size (without endpoint). BHFIELD
# requires the extent to be given in µm. The distance in µm between
# first and last pixel (measured from pixel center) is
# (grid_size - 1) * pixel_size.
size_um = (np.array(grid_size) - 1) * pixel_size * 1e6
# The same holds for the offset. If we use size_um here,
# we already take into account the half-pixel offset.
offset_um = np.array(center) * pixel_size * 1e6 - size_um / 2
kwargs = {"radius_sphere_um": radius_um,
          "refractive_index_medium": medium_index,
          "refractive_index_sphere": sphere_index,
          "measurement_position_um": propd_um,
          "wavelength_nm": wave_nm,
          "size_simulation_um": size_um,
          "shape_grid": grid_size,
          "offset_x_um": offset_um[0],
          "offset_y_um": offset_um[1]}
background = np.exp(1j * 2 * np.pi * propd_lamd * medium_index)
field = simulate_sphere(arp=arp, **kwargs) / background
# refocus
refoc = nrefocus.refocus(field,
                         d=-((radius + focus) / pixel_size),
                         nm=medium_index,
                         res=wavelength / pixel_size)
# Phase (2PI offset corrected) and amplitude
amp, pha = field2ap_corr(refoc)
meta_data = {"pixel size": pixel_size,
             "wavelength": wavelength,
             "medium index": medium_index,
             "sim center": center,
             "sim radius": radius,
             "sim index": sphere_index,
             "sim model": "mie",
             }
qpi = qpimage.QPImage(data=(pha, amp),
                      which_data="phase,amplitude",
                      meta_data=meta_data)
return qpi
<SYSTEM_TASK:> Get an item from a dict which contains just one item. <END_TASK> <USER_TASK:> Description: def get_single_item(d): """Get an item from a dict which contains just one item."""
assert len(d) == 1, 'Single-item dict must have just one item, not %d.' % len(d)
return next(six.iteritems(d))
<SYSTEM_TASK:> Get a key from a dict which contains just one item. <END_TASK> <USER_TASK:> Description: def get_single_key(d): """Get a key from a dict which contains just one item."""
assert len(d) == 1, 'Single-item dict must have just one item, not %d.' % len(d)
return next(six.iterkeys(d))
<SYSTEM_TASK:> Get a value from a dict which contains just one item. <END_TASK> <USER_TASK:> Description: def get_single_value(d): """Get a value from a dict which contains just one item."""
assert len(d) == 1, 'Single-item dict must have just one item, not %d.' % len(d)
return next(six.itervalues(d))
<SYSTEM_TASK:> Get the list of distinct values with preserving order. <END_TASK> <USER_TASK:> Description: def distinct(xs): """Get the list of distinct values with preserving order."""
# don't use collections.OrderedDict because we do support Python 2.6
seen = set()
return [x for x in xs if x not in seen and not seen.add(x)]
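A few sample calls for the helpers above; the results follow directly from the implementations:

print(get_single_item({'k': 'v'}))   # ('k', 'v')
print(get_single_key({'k': 'v'}))    # 'k'
print(get_single_value({'k': 'v'}))  # 'v'
print(distinct([3, 1, 3, 2, 1]))     # [3, 1, 2] -- first occurrence wins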
<SYSTEM_TASK:> Configures object based on its initialization <END_TASK> <USER_TASK:> Description: def configure(self): # type: () -> None """ Configures object based on its initialization """
for i in vars(self):
    if i.startswith("_"):
        continue
    val = self.__get(i, return_type=type(getattr(self, i)))
    if val is not None:
        setattr(self, i, val)
<SYSTEM_TASK:> Does the magic! <END_TASK> <USER_TASK:> Description: def run(self, **kwargs): """ Does the magic! """
logger.info('UpdateLocationsIfNecessaryTask was called')
# read last ip count
try:
    with open(app_settings.IP_ASSEMBLER_IP_CHANGED_FILE, 'r') as f:
        content_list = f.readlines()
        if len(content_list) == 0:
            ip_count_old = -1
        else:
            ip_count_old = int(content_list[0])
except IOError:
    ip_count_old = -1
logger.info('read IP count of %(count)d' % {'count': ip_count_old})
# if IPs have significantly changed, update the locations
ip_count_now = IP.objects.count()
if ip_count_now == -1 or ip_count_now > ip_count_old + app_settings.IP_ASSEMBLER_IP_CHANGED_THRESHOLD:
    logger.info('Checking IP counts, last: %(ip_count_old)d - now: %(ip_count_now)d' % {
        'ip_count_old': ip_count_old,
        'ip_count_now': ip_count_now
    })
    # call the updater task
    UpdateHtaccessLocationsTask().delay()
    # write the new count to the file
    try:
        open(app_settings.IP_ASSEMBLER_IP_CHANGED_FILE, 'w').close()
        with open(app_settings.IP_ASSEMBLER_IP_CHANGED_FILE, 'w') as f:
            f.write(str(ip_count_now))
    except IOError:
        logger.exception('unable to write to file %(file_path)s' % {
            'file_path': app_settings.IP_ASSEMBLER_IP_CHANGED_FILE})
else:
    logger.info('nothing to do here')
<SYSTEM_TASK:> Checks the IMAP mailbox for new mails and tries to handle them. <END_TASK> <USER_TASK:> Description: def run(self, **kwargs): """ Checks the IMAP mailbox for new mails and tries to handle them. """
try:
    # connect to server and login
    box = imaplib.IMAP4_SSL(settings.IMAP_SERVER)
    box.login(settings.IMAP_USERNAME, settings.IMAP_PASSWORD)
    box.select()
    # search for all mails in the mailbox
    result, mail_indices = box.search(None, 'ALL')
    # if everything was ok...
    if result == 'OK':
        # check number of mails
        mail_count = len(mail_indices[0].split())
        logger.info('found %(mail_count)d mails...' % {'mail_count': mail_count})
        # iterate the mail indices and fetch the mails
        ips_created = 0
        for mail_index in mail_indices[0].split():
            logger.info('fetching mail %(mail_index)s...' % {'mail_index': int(mail_index)})
            # mail data is a list with a tuple
            sub_result, mail_data = box.fetch(mail_index, '(BODY[TEXT])')
            if sub_result == 'OK':
                # fetch the ips
                ips = list_remove_duplicates(
                    self.find_ips(''.join([str(data) for data in mail_data[0]]))
                )
                # if ips found, add them and delete the mail
                if len(ips) > 0:
                    logger.info('found %(count)d IPs' % {'count': len(ips)})
                    ips_created += IP.batch_add_ips(ips)
                    box.store(mail_index, '+FLAGS', '\\Deleted')
            else:
                logger.error('fetching mail with index %(index)d failed' % {'index': mail_index})
        # finally, if ips were added, unify the IPs
        if ips_created > 0:
            logger.info('created %(count)d IPs' % {'count': ips_created})
            IP.unify_ips()
    else:
        logger.error('search returned not OK')
    box.close()
    box.logout()
except:
    logger.exception('retrieving mail failed')
<SYSTEM_TASK:> Authorize with Trello, saving creds to `credfile`. <END_TASK> <USER_TASK:> Description: def authorize(client_key, client_secret, credfile=CONFIG_FILE, app='trlo.py', expiration='never', scope='read,write'): """ Authorize with Trello, saving creds to `credfile`. """
# 1. Obtain the request token.
oauth = OAuth1Session(client_key, client_secret=client_secret)
request_token = oauth.fetch_request_token(OAUTH_REQUEST_TOKEN_URL)

# 2. Authorize the user (in the browser).
authorization_url = oauth.authorization_url(
    OAUTH_BASE_AUTHORIZE_URL,
    name=app, expiration=expiration, scope=scope)
print("Please visit the following URL to authorize this app:")
print(authorization_url)
print('')
print("Once you've authorized, copy and paste the token Trello gives you below.")

# Py3K backwards-compatibility shim
global input
try:
    input = raw_input
except NameError:
    pass

verifier = input("Trello's token: ").strip()

# 3. Obtain the access token
oauth = OAuth1Session(client_key,
                      client_secret=client_secret,
                      resource_owner_key=request_token['oauth_token'],
                      resource_owner_secret=request_token['oauth_token_secret'],
                      verifier=verifier)
access_token = oauth.fetch_access_token(OAUTH_ACCESS_TOKEN_URL)

# Save all our creds to ~/.trlo so we can get at 'em later.
# The names are specially chosen so we can do OAuth1Session(**creds)
creds = {
    'client_key': client_key,
    'client_secret': client_secret,
    'resource_owner_key': access_token['oauth_token'],
    'resource_owner_secret': access_token['oauth_token_secret'],
}
with open(credfile, 'w') as fp:
    json.dump(creds, fp)
<SYSTEM_TASK:> returns the country name from country code <END_TASK> <USER_TASK:> Description: def getCountry(value): """ returns the country name from country code @return string """
if not helpers.has_len(value):
    return False
return COUNTRIES.get(str(value).lower(), False)
<SYSTEM_TASK:> checks if given value is a valid country code <END_TASK> <USER_TASK:> Description: def validate(value): """ checks if given value is a valid country code @param string value @return bool """
if not helpers.has_len(value):
    return False
return COUNTRIES.has_key(str(value).lower())
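Illustrative calls, assuming COUNTRIES maps lowercase ISO codes to names (e.g. {'fr': 'France'}; the mapping itself is not shown in the source):

getCountry('FR')   # 'France' -- lookup is case-insensitive
getCountry('zz')   # False    -- unknown code
validate('fr')     # True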
<SYSTEM_TASK:> Render the tooltip for this column for an object <END_TASK> <USER_TASK:> Description: def render_tooltip(self, tooltip, obj): """Render the tooltip for this column for an object """
if self.tooltip_attr:
    val = getattr(obj, self.tooltip_attr)
elif self.tooltip_value:
    val = self.tooltip_value
else:
    return False
setter = getattr(tooltip, TOOLTIP_SETTERS.get(self.tooltip_type))
if self.tooltip_type in TOOLTIP_SIZED_TYPES:
    setter(val, self.tooltip_image_size)
else:
    setter(val)
return True
<SYSTEM_TASK:> Select the key for a tool from a list of supported tools. <END_TASK> <USER_TASK:> Description: def choose_tool_key(full_configuration, keys): """ Select the key for a tool from a list of supported tools. This function is designed to help when multiple keys can be used to specify an option (e.g., during migration from one name to another). The values in keys should be ordered based on preference, as that's the order they'll be checked. If anything other than the first entry is selected, a warning will be displayed telling the user to migrate their configuration. Arguments: full_configuration - the full configuration for a run of the project keys - a list of keys to consider """
tool_key = _choose_key(full_configuration.config, keys)
if tool_key != keys[0]:
    full_configuration.executor.warning(
        "{} is deprecated; migrate to {}".format(tool_key, keys[0]))
return tool_key
<SYSTEM_TASK:> This helper function initializes a tool with the given args. <END_TASK> <USER_TASK:> Description: def tool_builder(component, key, tool_map, *args): """This helper function initializes a tool with the given args."""
# pylint: disable=protected-access
tool_name = component.get(key)
if tool_name:
    tool_fn = tool_map.get(tool_name)
    if tool_fn:
        return tool_fn[0](*args)
    raise Exception(
        "Unknown {} '{}' for {}".format(key, tool_name, component.name))
raise MissingToolKey(key, component)
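A hypothetical sketch of what a tool map for tool_builder might look like; the names _EchoTool, _TOOL_MAP, and "build.tool" are illustrative, but the structure (a constructor at index 0 of each map value) follows from the tool_fn[0](*args) call above:

class _EchoTool(object):
    def __init__(self, *args):
        self.args = args

_TOOL_MAP = {
    # tool name -> (constructor, human-readable description)
    "echo": (_EchoTool, "A stand-in tool used for illustration"),
}

# component.get("build.tool") would need to return "echo" here;
# any other name raises, and a missing key raises MissingToolKey.
tool = tool_builder(component, "build.tool", _TOOL_MAP, "arg1", "arg2")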
<SYSTEM_TASK:> Process arguments a tool cares about. <END_TASK> <USER_TASK:> Description: def args_builder(prefix, current_target, args_dict, value_found_fn): """ Process arguments a tool cares about. Since most tools require configuration, this function helps deal with the boilerplate. Each option will be processed based on all modifications supported by dev-pipeline (i.e., profiles and overrides) in the proper order. Arguments: prefix -- The prefix for each argument. This will be applied to everything in args_dict. current_target -- Information about the current target being processed. args_dict -- Something that acts like a dictionary. The keys should be options to deal with and the value should be the separator value the option requires. The separator can be any type that has a join method, or None if lists are supported for that key. value_found_fn -- A function to call when a match is found. """
current_config = current_target.config
for key, separator in args_dict.items():
    option = "{}.{}".format(prefix, key)
    value = current_config.get_list(option)
    if value:
        if separator is None:
            separator = _NullJoiner(current_config.name, option)
        value_found_fn(separator.join(value), key)
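A hypothetical usage sketch for args_builder; the "cmake" prefix and option names are illustrative. Each configured value list is joined with its separator and handed to the callback; a None separator goes through _NullJoiner, which insists on exactly one value:

collected = {}

def _on_value(value, key):
    collected[key] = value

args_builder("cmake", current_target, {"cflags": " ", "generator": None}, _on_value)
# e.g. a configured cmake.cflags of ['-O2', '-g'] would yield
# collected['cflags'] == '-O2 -g'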
<SYSTEM_TASK:> Helper function to build a list of options. <END_TASK> <USER_TASK:> Description: def build_flex_args_keys(components): """ Helper function to build a list of options. Some tools require variations of the same options (e.g., cflags for debug vs release builds), but manually creating those options is cumbersome and error-prone. This function handles that work by combining all possible combinations of the values in components. Arguments components -- A list of lists that should be combined to form options. """
def _prepend_first(components, sub_components):
    ret = []
    for first in components[0]:
        for sub_component in sub_components:
            ret.append("{}.{}".format(first, sub_component))
    return ret

if len(components) > 1:
    sub_components = build_flex_args_keys(components[1:])
    return _prepend_first(components, sub_components)
if len(components) == 1:
    return components[0]
return []
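Two sample calls; the expected outputs follow from the recursion above:

print(build_flex_args_keys([["cflags"], ["debug", "release"]]))
# ['cflags.debug', 'cflags.release']
print(build_flex_args_keys([["args", "cflags"], ["debug", "release"]]))
# ['args.debug', 'args.release', 'cflags.debug', 'cflags.release']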
<SYSTEM_TASK:> Either return the non-list value or raise an Exception. <END_TASK> <USER_TASK:> Description: def join(self, vals): """ Either return the non-list value or raise an Exception. Arguments: vals - a list of values to process """
if len(vals) == 1:
    return vals[0]
raise Exception(
    "Too many values for {}:{}".format(self._component_name, self._key)
)
<SYSTEM_TASK:> Handle an audio event. <END_TASK> <USER_TASK:> Description: def handle_audio(obj, wait=False): """Handle an audio event. This function plays an audio file. Currently only `.wav` format is supported. :param obj: An :py:class:`~turberfield.dialogue.model.Model.Audio` object. :param bool wait: Force a blocking wait until playback is complete. :return: The supplied object. """
if not simpleaudio:
    return obj
fp = pkg_resources.resource_filename(obj.package, obj.resource)
data = wave.open(fp, "rb")
nChannels = data.getnchannels()
bytesPerSample = data.getsampwidth()
sampleRate = data.getframerate()
nFrames = data.getnframes()
framesPerMilliSecond = nChannels * sampleRate // 1000
offset = framesPerMilliSecond * obj.offset
duration = nFrames - offset
duration = min(
    duration,
    framesPerMilliSecond * obj.duration
    if obj.duration is not None else duration
)
data.readframes(offset)
frames = data.readframes(duration)
for i in range(obj.loop):
    waveObj = simpleaudio.WaveObject(
        frames, nChannels, bytesPerSample, sampleRate)
    playObj = waveObj.play()
    if obj.loop > 1 or wait:
        playObj.wait_done()
return obj
<SYSTEM_TASK:> Handle an interlude event. <END_TASK> <USER_TASK:> Description: def handle_interlude( self, obj, folder, index, ensemble, loop=None, **kwargs ): """Handle an interlude event. Interlude functions permit branching. They return a folder which the application can choose to adopt as the next supplier of dialogue. This handler calls the interlude with the supplied arguments and returns the result. :param obj: A callable object. :param folder: A :py:class:`~turberfield.dialogue.model.SceneScript.Folder` object. :param int index: Indicates which scene script in the folder is being processed. :param ensemble: A sequence of Python objects. :param branches: A sequence of :py:class:`~turberfield.dialogue.model.SceneScript.Folder` objects. from which to pick a branch in the action. :return: A :py:class:`~turberfield.dialogue.model.SceneScript.Folder` object. """
if obj is None:
    return folder.metadata
else:
    return obj(folder, index, ensemble, loop=loop, **kwargs)
<SYSTEM_TASK:> Handle a line event. <END_TASK> <USER_TASK:> Description: def handle_line(self, obj): """Handle a line event. This function displays a line of dialogue. It generates a blocking wait for a period of time calculated from the length of the line. :param obj: A :py:class:`~turberfield.dialogue.model.Model.Line` object. :return: The supplied object. """
if obj.persona is None:
    return obj
name = getattr(obj.persona, "_name", "")
print(
    textwrap.indent(
        "{t.normal}{name}".format(name=name, t=self.terminal),
        " " * 2),
    end="\n",
    file=self.terminal.stream
)
print(
    textwrap.indent(
        "{t.normal}{obj.text}".format(obj=obj, t=self.terminal),
        " " * 10),
    end="\n" * 2,
    file=self.terminal.stream
)
interval = self.pause + self.dwell * obj.text.count(" ")
time.sleep(interval)
return obj
<SYSTEM_TASK:> Handle a memory event. <END_TASK> <USER_TASK:> Description: def handle_memory(self, obj): """Handle a memory event. This function accesses the internal database. It writes a record containing state information and an optional note. :param obj: A :py:class:`~turberfield.dialogue.model.Model.Memory` object. :return: The supplied object. """
if obj.subject is not None:
    with self.con as db:
        SchemaBase.note(
            db,
            obj.subject,
            obj.state,
            obj.object,
            text=obj.text,
            html=obj.html,
        )
return obj
<SYSTEM_TASK:> Handle a property event. <END_TASK> <USER_TASK:> Description: def handle_property(self, obj): """Handle a property event. This function will set an attribute on an object if the event requires it. :param obj: A :py:class:`~turberfield.dialogue.model.Model.Property` object. :return: The supplied object. """
if obj.object is not None:
    try:
        setattr(obj.object, obj.attr, obj.val)
    except AttributeError as e:
        self.log.error(". ".join(getattr(e, "args", e) or e))
    try:
        print(
            "{t.dim}{obj.object._name}.{obj.attr} = {obj.val!s}{t.normal}".format(
                obj=obj, t=self.terminal
            ),
            end="\n" * 2,
            file=self.terminal.stream
        )
    except AttributeError as e:
        self.log.error(". ".join(getattr(e, "args", e) or e))
return obj
<SYSTEM_TASK:> Handle a scene event. <END_TASK> <USER_TASK:> Description: def handle_scene(self, obj): """Handle a scene event. This function applies a blocking wait at the start of a scene. :param obj: A :py:class:`~turberfield.dialogue.model.Model.Shot` object. :return: The supplied object. """
print(
    "{t.dim}{scene}{t.normal}".format(
        scene=obj.scene.capitalize(), t=self.terminal
    ),
    end="\n" * 3,
    file=self.terminal.stream
)
time.sleep(self.pause)
return obj
<SYSTEM_TASK:> obtain request from queue instead of directly from server socket <END_TASK> <USER_TASK:> Description: def process_request_thread(self, mainthread): """obtain request from queue instead of directly from server socket"""
life_time = time.time()
nb_requests = 0
while not mainthread.killed():
    if self.max_life_time > 0:
        if (time.time() - life_time) >= self.max_life_time:
            mainthread.add_worker(1)
            return
        try:
            SocketServer.ThreadingTCPServer.process_request_thread(
                self, *self.requests.get(True, 0.5))
        except Queue.Empty:
            continue
    else:
        SocketServer.ThreadingTCPServer.process_request_thread(
            self, *self.requests.get())
    LOG.debug("nb_requests: %d, max_requests: %d",
              nb_requests, self.max_requests)
    nb_requests += 1
    if self.max_requests > 0 and nb_requests >= self.max_requests:
        mainthread.add_worker(1)
        return
<SYSTEM_TASK:> Build distributions of the code. <END_TASK> <USER_TASK:> Description: def build_distribution(): """Build distributions of the code."""
result = invoke.run('python setup.py sdist bdist_egg bdist_wheel',
                    warn=True, hide=True)
if result.ok:
    print("[{}GOOD{}] Distribution built without errors."
          .format(GOOD_COLOR, RESET_COLOR))
else:
    print('[{}ERROR{}] Something broke trying to package your '
          'code...'.format(ERROR_COLOR, RESET_COLOR))
    print(result.stderr)
    sys.exit(1)
<SYSTEM_TASK:> Install things that need to be in place before installing the main package. <END_TASK> <USER_TASK:> Description: def other_dependencies(ctx, server, environment): """Install things that need to be in place before installing the main package."""
if 'extra_packages' in ctx.releaser:
    server = server.lower()
    extra_pkgs = []
    if server in ["local"]:
        if 'local' in ctx.releaser.extra_packages:
            extra_pkgs.extend(ctx.releaser.extra_packages.local)
    elif server in ["testpypi", "pypitest"]:
        # these are packages not available on the test server, so install
        # them off the regular pypi server
        if 'test' in ctx.releaser.extra_packages and \
                ctx.releaser.extra_packages.test is not None:
            extra_pkgs.extend(ctx.releaser.extra_packages.test)
    elif server in ["pypi"]:
        if 'pypi' in ctx.releaser.extra_packages and \
                ctx.releaser.extra_packages.pypi is not None:
            extra_pkgs.extend(ctx.releaser.extra_packages.pypi)
    else:
        print("** Nothing more to install **")
    if extra_pkgs:
        print('** Other Dependencies, based on server', server, '**')
    for pkg in extra_pkgs:
        result = invoke.run('env{0}{1}{0}Scripts{0}pip{2} install {3}'
                            .format(os.sep, environment, PIP_EXT, pkg),
                            hide=True)
        if result.ok:
            print('{}[{}GOOD{}] Installed {}'
                  .format("", GOOD_COLOR, RESET_COLOR, pkg))
        else:
            print('{}[{}ERROR{}] Something broke trying to install '
                  'package: {}'.format("", ERROR_COLOR, RESET_COLOR, pkg))
            print(result.stderr)
            sys.exit(1)
<SYSTEM_TASK:> Upload and install works? <END_TASK> <USER_TASK:> Description: def check_local_install(ctx, version, ext, server="local"): """ Upload and install works? Uploads a distribution to PyPI, and then tests to see if I can download and install it. Returns: str: string summarizing the operation """
here = Path(ctx.releaser.here).resolve()
dist_dir = here / 'dist'
all_files = list(dist_dir.glob('*.{}'.format(ext)))
the_file = all_files[0]
for f in all_files[1:]:
    if f.stat().st_mtime > the_file.stat().st_mtime:
        the_file = f
# this is the latest generated file of the given version
environment = 'env-{}-{}-{}'.format(version, ext, server)
if server == "local":
    pass
else:
    # upload to server
    print("** Uploading to server **")
    cmd = 'twine upload {}'.format(the_file)
    # for PyPI, let twine pick the server
    if server != "pypi":
        cmd = cmd + ' -r {}'.format(server)
    result = invoke.run(cmd, warn=True)
    if result.failed:
        print(textwrap.fill(
            "[{}ERROR{}] Something broke trying to upload your package. "
            "This will be the case if you have already uploaded it "
            "before. To upload again, use a different version number "
            "(or a different build by including a '+' suffix to your "
            "version number).".format(ERROR_COLOR, RESET_COLOR),
            width=text.get_terminal_size().columns - 1,
            subsequent_indent=' ' * 8))
        # print(result.stderr)
# remove directory if it exists
if (here / 'env' / environment).exists():
    shutil.rmtree('env' + os.sep + environment)
invoke.run('python -m venv env{}{}'.format(os.sep, environment))
other_dependencies(ctx, server, environment)
if server == "local":
    result = invoke.run('env{0}{1}{0}Scripts{0}pip{2} install {3} --no-cache'
                        .format(os.sep, environment, '.exe', the_file),
                        hide=True)
else:
    # print("  **Install from server**")
    result = invoke.run('env{0}{1}{0}Scripts{0}pip{2} install -i {3} '
                        '{4}=={5} --no-cache'
                        .format(os.sep, environment, '.exe',
                                server_url(server),
                                ctx.releaser.module_name, version),
                        hide=True)
if result.failed:
    print('[{}ERROR{}] Something broke trying to install your package.'
          .format(ERROR_COLOR, RESET_COLOR))
    print(result.stderr)
    sys.exit(1)
print("** Test version of installed package **")
result = invoke.run('env{0}{1}{0}Scripts{0}python{2} -c '
                    'exec("""import {3}\\nprint({3}.__version__)""")'
                    .format(os.sep, environment, '.exe',
                            (ctx.releaser.module_name).strip()))
test_version = result.stdout.strip()
# print(test_version, type(test_version), type(expected_version))
if Version(test_version) == version:
    results = '{}{} install {} works!{}'.format(
        GOOD_COLOR, server, ext, RESET_COLOR)
else:
    results = '{}{} install {} broken{}'.format(
        ERROR_COLOR, server, ext, RESET_COLOR)
print(results)
return results
<SYSTEM_TASK:> Append a member to a metadata array entry <END_TASK> <USER_TASK:> Description: def iscm_md_append_array(self, arraypath, member): """ Append a member to a metadata array entry """
array_path = arraypath.split(".")
array_key = array_path.pop()
current = self.metadata
# walk (and create as needed) the nested dictionaries down to the array
for k in array_path:
    if k not in current:
        current[k] = {}
    current = current[k]
if array_key not in current:
    current[array_key] = []
if not isinstance(current[array_key], list):
    raise KeyError("%s doesn't point to an array" % arraypath)
current[array_key].append(member)
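A minimal usage sketch (the host class here is hypothetical; only a plain dict `metadata` attribute is assumed, as the method above expects):

class _MetadataHost:
    # stand-in for the real ISCM class, which is not part of this snippet
    def __init__(self):
        self.metadata = {}
    append_array = iscm_md_append_array  # bind the function above as a method

host = _MetadataHost()
host.append_array("build.steps", "compile")
host.append_array("build.steps", "test")
assert host.metadata == {"build": {"steps": ["compile", "test"]}}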
<SYSTEM_TASK:>
Look up the variables in the provided dictionary, resolving them with entries
<END_TASK>
<USER_TASK:>
Description:
def context_lookup(self, vars):
    """ Look up the variables in the provided dictionary, resolving them
    with entries in the context """
# resolve the dictionary itself first, in case it is an expression
while isinstance(vars, IscmExpr):
    vars = vars.resolve(self.context)
# then resolve each individual entry
for (k, v) in vars.items():
    if isinstance(v, IscmExpr):
        vars[k] = v.resolve(self.context)
return vars
<SYSTEM_TASK:> Apply this ISCM configuration into a launchable resource, such as <END_TASK> <USER_TASK:> Description: def apply_to(self, launchable): """ Apply this ISCM configuration into a launchable resource, such as an EC2 instance or an AutoScalingGroup LaunchConfig. """
# Update user data if launchable.get_property("UserData") is not None: raise NotImplementedError("It's not yet supported to append SCM to existing userdata") user_data = { "Fn::Base64" : { "Fn::Join" : ["", [ "\n".join([ r'#!/bin/bash', r'FATAL() { code=$1; shift; echo "[FATAL] $*" >&2; exit $code; }', r'ERROR() { echo "[ERROR] $*" >&2 ; }', r'WARN() { echo "[WARNING] $*" >&2 ; }', r'INFO() { echo "[INFO] $*" >&2 ; }', "", ]) ] + (self.wc_handle is not None and [ cfnjoin("", r'ISCM_WCHANDLE_URL="', self.wc_handle, '"\n' ) ] or []) + [ "\n".join([ r'{', r'INFO "CloudCast ISCM booting on $(date)"', "\n\n" ]) ] + self.userdata_elems + [ "\n".join([ "", r'iscm_result=$?', r'[ -n "$ISCM_WCHANDLE_URL" ] && [ -n "$(which cfn-signal)" ] && cfn-signal -e $iscm_result $ISCM_WCHANDLE_URL', '\nINFO "CloudCast ISCM successfully completed on $(date)"', '} 2>&1 | tee -a /iscm.log\n' ]) ] ]} } launchable.add_property("UserData", user_data) # Set meta data keys for k in self.metadata: if launchable.get_metadata_key(k) is not None: raise NotImplementedError("It's not yet supported to append to existing metadata keys") launchable.add_metadata_key(k, self.metadata[k])
<SYSTEM_TASK:>
This method is intended to be used by backends only.
<END_TASK>
<USER_TASK:>
Description:
def register_uri_backend(uri_scheme, create_method, module, c14n_uri_method, escape, cast, is_connected):
    """
    This method is intended to be used by backends only.

    It lets them register their services, identified by the URI scheme,
    at import time. The associated method create_method must take one
    parameter: the complete requested RFC 3986 compliant URI.

    The associated module must be compliant with DBAPI v2.0 but will not
    be directly used for other purposes than compatibility testing.

    c14n_uri_method must be a function that takes one string argument (of
    the same form as the one that would be passed to connect_by_uri) and
    returns its canonicalized form in an implementation-dependent way. This
    includes transforming any local pathname into an absolute form.
    c14n_uri_method can also be None, in which case the behavior will be
    the same as that of the identity function.

    escape must be a function that takes one string argument (an unescaped
    column name) and returns an escaped version for use as an escaped
    column name in an SQL query for this backend.

    If something obviously incompatible is registered,
    NotImplementedError is raised.
    """
try:
    delta_api = __compare_api_level(module.apilevel, any_apilevel)
    mod_paramstyle = module.paramstyle
    mod_threadsafety = module.threadsafety
except AttributeError:
    # the module lacks a mandatory DBAPI attribute
    raise NotImplementedError("This module does not support registration "
                              "of non-DBAPI services of at least apilevel 2.0")
if delta_api < 0 or delta_api > 1:
    raise NotImplementedError("This module does not support registration "
                              "of DBAPI services with a specified apilevel of %s"
                              % module.apilevel)
if mod_paramstyle not in ['pyformat', 'format', 'qmark']:
    raise NotImplementedError("This module only supports registration "
                              "of DBAPI services with a 'format', 'pyformat' or "
                              "'qmark' paramstyle, not %r" % mod_paramstyle)
if mod_threadsafety < any_threadsafety:
    raise NotImplementedError("This module does not support registration "
                              "of DBAPI services of threadsafety %d (more "
                              "generally under %d)"
                              % (mod_threadsafety, any_threadsafety))
if not urisup.valid_scheme(uri_scheme):
    raise urisup.InvalidSchemeError("Can't register an invalid URI scheme %r"
                                    % uri_scheme)
__uri_create_methods[uri_scheme] = (create_method, module, c14n_uri_method,
                                    escape, cast, is_connected)
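A hedged illustration of a registration call, using the standard sqlite3 module (the "sqlite3" scheme and the helper functions are made up for this sketch; whether registration succeeds also depends on the module-level any_apilevel and any_threadsafety values, which are not shown here):

import sqlite3

def _create(uri):
    # hypothetical: open the file path that follows the scheme
    return sqlite3.connect(uri[len("sqlite3:"):])

def _escape(column):
    # double-quote identifiers, doubling any embedded quotes
    return '"%s"' % column.replace('"', '""')

# sqlite3 advertises apilevel '2.0' and the allowed 'qmark' paramstyle;
# c14n_uri_method may be None, per the docstring above
register_uri_backend("sqlite3", _create, sqlite3, None, _escape, None, None)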
<SYSTEM_TASK:> Connect to the database. <END_TASK> <USER_TASK:> Description: def __connect(self): """ Connect to the database. """
self.__methods = _get_methods_by_uri(self.sqluri) uri_connect_method = self.__methods[METHOD_CONNECT] self.__dbapi2_conn = uri_connect_method(self.sqluri)
<SYSTEM_TASK:> Reconnect to the database. <END_TASK> <USER_TASK:> Description: def reconnect(self, query = None, log_reconnect = False): """ Reconnect to the database. """
uri = list(urisup.uri_help_split(self.sqluri)) if uri[1]: authority = list(uri[1]) if authority[1]: authority[1] = None uri[1] = authority if log_reconnect: LOG.warning('reconnecting to %r database (query: %r)', urisup.uri_help_unsplit(uri), query) self.__connect()
<SYSTEM_TASK:> Performs bulk operation <END_TASK> <USER_TASK:> Description: def perform(self): """ Performs bulk operation """
for request in self._cfg[Integrator._CFG_KEY_REQUESTS]: request_type = self._cfg[Integrator._CFG_KEY_REQUESTS][request][Integrator._CFG_KEY_REQUEST_TYPE] request_cfg_file = self._cfg[Integrator._CFG_KEY_REQUESTS][request][Integrator._CFG_KEY_REQUEST_CFG_FILE] self._logger.debug('{}'.format(request_cfg_file)) self._process_request(request, request_type, request_cfg_file)
<SYSTEM_TASK:>
Create and return a Builder for a component.
<END_TASK>
<USER_TASK:>
Description:
def _make_builder(config, current_target):
    """
    Create and return a Builder for a component.

    Arguments
    config - The configuration used to construct the builder.
    current_target - The target the builder should be created for.
    """
tool_key = devpipeline_core.toolsupport.choose_tool_key( current_target, _BUILD_TOOL_KEYS ) return devpipeline_core.toolsupport.tool_builder( config, tool_key, devpipeline_build.BUILDERS, current_target )
<SYSTEM_TASK:>
Build a target.
<END_TASK>
<USER_TASK:>
Description:
def build_task(current_target):
    """
    Build a target.

    Arguments
    current_target - The target to build.
    """
target = current_target.config try: builder = _make_builder(target, current_target) build_path = _get_build_path(target, builder) if not os.path.exists(build_path): os.makedirs(build_path) builder.configure(src_dir=target.get("dp.src_dir"), build_dir=build_path) builder.build(build_dir=build_path) no_install = devpipeline_core.toolsupport.choose_tool_key( current_target, _NO_INSTALL_KEYS ) if no_install not in target: install_path = target.get( devpipeline_core.toolsupport.choose_tool_key( current_target, _INSTALL_PATH_KEYS ), fallback="install", ) builder.install(build_dir=build_path, install_dir=install_path) _find_file_paths(target, os.path.join(build_path, install_path)) except devpipeline_core.toolsupport.MissingToolKey as mtk: current_target.executor.warning(mtk)
<SYSTEM_TASK:> Return a dictionary representation of this phase. <END_TASK> <USER_TASK:> Description: def get_dict_repr(self): """ Return a dictionary representation of this phase. This will be used for checksumming, in order to uniquely compare instance images against their requirements """
return dict(
    phase_name=self.phase_name,
    phase_type=self.phase_type,
    actions=self.actions,
)
<SYSTEM_TASK:>
get elements by discrete indices
<END_TASK>
<USER_TASK:>
Description:
def discrete_index(self, indices):
    """get elements by discrete indices

    :param indices: list of discrete indices
    :return: the elements at those indices
    """
return [self[i] for i in indices]
<SYSTEM_TASK:>
Finds the rotation, scaling and translation of im1 relative to im0
<END_TASK>
<USER_TASK:>
Description:
def register_images(im0, im1, *, rmMean=True, correctScale=True):
    """Finds the rotation, scaling and translation of im1 relative to im0

    Parameters
    ----------
    im0: First image
    im1: Second image
    rmMean: Set to true to remove the mean (Default)

    Returns
    -------
    angle: The angle difference
    scale: The scale difference
    [y, x]: The offset
    im2: The rotated and translated second image

    Notes
    -----
    The algorithm uses a gaussian fit for subpixel precision.
    The best case is two square images of the same size.
    The algorithm is faster if the size is a power of 2.
    """
# sanitize input
im0 = np.asarray(im0, dtype=np.float32)
im1 = np.asarray(im1, dtype=np.float32)
if rmMean:
    # remove mean
    im0 = im0 - im0.mean()
    im1 = im1 - im1.mean()
# Compute DFT (the images are resized to the same size)
f0, f1 = dft_optsize_same(im0, im1)
# Get rotation and scale
angle, scale = find_rotation_scale(f0, f1, isccs=True)
# Avoid fluctuations
if not correctScale:
    if np.abs(1 - scale) > 0.05:
        warnings.warn("Scale should be corrected")
    scale = 1
# apply rotation and scale
im2 = rotate_scale(im1, angle, scale)
f2 = dft_optsize(im2, shape=f0.shape)
# Find offset
y, x = find_shift_dft(f0, f2, isccs=True)
return angle, scale, [y, x], im2
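A small round-trip sketch (synthetic data; assumes numpy and the other functions of this module are importable, and the recovered values are only expected to be close, not exact):

import numpy as np

rng = np.random.default_rng(0)
im0 = rng.random((128, 128)).astype(np.float32)
# build a second image with a known rotation and shift
im1 = shift_image(rotate_scale(im0, angle=0.05, scale=1.0), (3, -2))
angle, scale, (y, x), im2 = register_images(im0, im1)
# angle and (y, x) should be close to the applied transform,
# up to the sign conventions of the functions above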
<SYSTEM_TASK:>
Compares the images and returns the best guess for the rotation angle
<END_TASK>
<USER_TASK:>
Description:
def find_rotation_scale(im0, im1, isccs=False):
    """Compares the images and returns the best guess for the rotation angle
    and scale difference.

    Parameters
    ----------
    im0: 2d array
        First image
    im1: 2d array
        Second image
    isccs: boolean, default False
        Set to True if the images are already DFT and in CCS representation

    Returns
    -------
    angle: number
        The angle difference
    scale: number
        The scale difference

    Notes
    -----
    Uses find_shift_dft
    """
# sanitize input im0 = np.asarray(im0, dtype=np.float32) im1 = np.asarray(im1, dtype=np.float32) truesize = None # if ccs, convert to shifted dft before giving to polar_fft if isccs: truesize = im0.shape im0 = centered_mag_sq_ccs(im0) im1 = centered_mag_sq_ccs(im1) # Get log polar coordinates. choose the log base lp1, log_base = polar_fft(im1, logpolar=True, isshiftdft=isccs, logoutput=True, truesize=truesize) lp0, log_base = polar_fft(im0, logpolar=True, isshiftdft=isccs, logoutput=True, truesize=truesize, nangle=lp1.shape[0], radiimax=lp1.shape[1]) # Find the shift with log of the log-polar images, # to compensate for dft intensity repartition angle, scale = find_shift_dft(lp0, lp1) # get angle in correct units angle *= np.pi / lp1.shape[0] # get scale in linear units scale = log_base ** (scale) # return angle and scale return angle, scale
<SYSTEM_TASK:>
Finds the best shift between im0 and im1 using cross correlation
<END_TASK>
<USER_TASK:>
Description:
def find_shift_cc(im0, im1, ylim=None, xlim=None, subpix=True):
    """Finds the best shift between im0 and im1 using cross correlation

    Parameters
    ----------
    im0: 2d array
        First image
    im1: 2d array
        Second image
    ylim: 2 numbers, optional
        The y limits of the search (if None full range is searched)
    xlim: 2 numbers, optional
        The x limits of the search (if None full range is searched)
    subpix: boolean, default True
        Set to False to disable subpixel precision

    Returns
    -------
    [y, x]: 2 numbers
        The offset

    Notes
    -----
    The origin of im1 in the im0 referential is returned.
    ylim and xlim limit the possible output.
    Subpixel precision is used unless subpix is False.
    """
# sanitize input
im0 = np.asarray(im0, dtype=np.float32)
im1 = np.asarray(im1, dtype=np.float32)

# Remove mean
im0 = im0 - np.nanmean(im0)
im1 = im1 - np.nanmean(im1)

# Save shapes as np array
shape0 = np.asarray(im0.shape)
shape1 = np.asarray(im1.shape)

# Compute the offset and the pad (yleft, yright, xtop, xbottom)
offset = 1 - shape1
pad = np.pad(-offset, (1, 1), mode='edge')

# apply limit on padding
if ylim is not None:
    pad[0] = -ylim[0]
    pad[1] = ylim[1] + (shape1 - shape0)[0]
if xlim is not None:
    pad[2] = -xlim[0]
    pad[3] = xlim[1] + (shape1 - shape0)[1]

# pad image
im0, offset = pad_img(im0, pad)
# compute Cross correlation matrix
xc = cv2.matchTemplate(im0, im1, cv2.TM_CCORR)
# Find maximum of abs (can be anticorrelated)
idx = np.asarray(np.unravel_index(np.argmax(xc), xc.shape))
# Return origin in im0 units
if subpix:
    # update idx
    idx = np.asarray([get_peak_pos(xc[:, idx[1]], wrap=False),
                      get_peak_pos(xc[idx[0], :], wrap=False)])
else:
    # restrict to reasonable values, wrapping around the correlation shape
    # (the original snippet referenced an undefined `shape`; the correlation
    # matrix shape is presumably what was intended)
    shape = np.asarray(xc.shape)
    idx[idx > shape // 2] -= shape[idx > shape // 2]
return idx + offset
<SYSTEM_TASK:>
Combine similar images into one to reduce the noise
<END_TASK>
<USER_TASK:>
Description:
def combine_images(imgs, register=True):
    """Combine similar images into one to reduce the noise

    Parameters
    ----------
    imgs: list of 2d array
        Series of images
    register: Boolean, default True
        True if the images should be registered before combination

    Returns
    -------
    im: 2d array
        The result image

    Notes
    -----
    This is an example of the usage of the library
    """
imgs = np.asarray(imgs, dtype="float") if register: for i in range(1, imgs.shape[0]): ret = register_images(imgs[0, :, :], imgs[i, :, :]) imgs[i, :, :] = rotate_scale_shift(imgs[i, :, :], *ret[:3], np.nan) return np.mean(imgs, 0)
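A short usage sketch (synthetic noisy stack; registration is disabled so only numpy and the function above are needed):

import numpy as np

rng = np.random.default_rng(1)
base = rng.random((64, 64))
stack = [base + 0.1 * rng.standard_normal((64, 64)) for _ in range(10)]
mean_im = combine_images(stack, register=False)
# the noise standard deviation should drop by roughly sqrt(10)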
<SYSTEM_TASK:>
Resize image for optimal DFT and computes it
<END_TASK>
<USER_TASK:>
Description:
def dft_optsize(im, shape=None):
    """Resize image for optimal DFT and computes it

    Parameters
    ----------
    im: 2d array
        The image
    shape: 2 numbers, optional
        The shape of the output image (None will optimize the shape)

    Returns
    -------
    dft: 2d array
        The dft in CCS representation

    Notes
    -----
    The shape should be a product of powers of 2, 3, and 5
    """
im = np.asarray(im) # save shape initshape = im.shape # get optimal size if shape is None: ys = cv2.getOptimalDFTSize(initshape[0]) xs = cv2.getOptimalDFTSize(initshape[1]) shape = [ys, xs] # Add zeros to go to optimal size im = cv2.copyMakeBorder(im, 0, shape[0] - initshape[0], 0, shape[1] - initshape[1], borderType=cv2.BORDER_CONSTANT, value=0) # Compute dft ignoring 0 rows (0 columns can not be optimized) f = cv2.dft(im, nonzeroRows=initshape[0]) return f
<SYSTEM_TASK:> Resize 2 image same size for optimal DFT and computes it <END_TASK> <USER_TASK:> Description: def dft_optsize_same(im0, im1): """Resize 2 image same size for optimal DFT and computes it Parameters ---------- im0: 2d array First image im1: 2d array Second image Returns ------- dft0: 2d array The dft of the first image dft1: 2d array The dft of the second image Notes ----- dft0 and dft1 will have the same size """
im0 = np.asarray(im0) im1 = np.asarray(im1) # save shape shape0 = im0.shape shape1 = im1.shape # get optimal size ys = max(cv2.getOptimalDFTSize(shape0[0]), cv2.getOptimalDFTSize(shape1[0])) xs = max(cv2.getOptimalDFTSize(shape0[1]), cv2.getOptimalDFTSize(shape1[1])) shape = [ys, xs] f0 = dft_optsize(im0, shape=shape) f1 = dft_optsize(im1, shape=shape) return f0, f1
<SYSTEM_TASK:> Rotates and scales the image <END_TASK> <USER_TASK:> Description: def rotate_scale(im, angle, scale, borderValue=0, interp=cv2.INTER_CUBIC): """Rotates and scales the image Parameters ---------- im: 2d array The image angle: number The angle, in radians, to rotate scale: positive number The scale factor borderValue: number, default 0 The value for the pixels outside the border (default 0) Returns ------- im: 2d array the rotated and scaled image Notes ----- The output image has the same size as the input. Therefore the image may be cropped in the process. """
im = np.asarray(im, dtype=np.float32) rows, cols = im.shape M = cv2.getRotationMatrix2D( (cols / 2, rows / 2), -angle * 180 / np.pi, 1 / scale) im = cv2.warpAffine(im, M, (cols, rows), borderMode=cv2.BORDER_CONSTANT, flags=interp, borderValue=borderValue) # REPLICATE return im
<SYSTEM_TASK:> shift the image <END_TASK> <USER_TASK:> Description: def shift_image(im, shift, borderValue=0): """shift the image Parameters ---------- im: 2d array The image shift: 2 numbers (y,x) the shift in y and x direction borderValue: number, default 0 The value for the pixels outside the border (default 0) Returns ------- im: 2d array The shifted image Notes ----- The output image has the same size as the input. Therefore the image will be cropped in the process. """
im = np.asarray(im, dtype=np.float32) rows, cols = im.shape M = np.asarray([[1, 0, shift[1]], [0, 1, shift[0]]], dtype=np.float32) return cv2.warpAffine(im, M, (cols, rows), borderMode=cv2.BORDER_CONSTANT, flags=cv2.INTER_CUBIC, borderValue=borderValue)
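A tiny sketch of the shift convention (assumes numpy, cv2 and the function above):

import numpy as np

im = np.zeros((5, 5), dtype=np.float32)
im[2, 2] = 1.0
out = shift_image(im, (1, -1))
# the bright pixel moves down by 1 and left by 1: out[3, 1] is ~1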
<SYSTEM_TASK:>
normalize the ccs representation
<END_TASK>
<USER_TASK:>
Description:
def ccs_normalize(compIM, ccsnorm):
    """ normalize the ccs representation

    Parameters
    ----------
    compIM: 2d array
        The CCS image in CCS representation
    ccsnorm: 2d array
        The normalization matrix in ccs representation

    Returns
    -------
    compIM: 2d array
        The normalized CCS image

    Notes
    -----
    (basically an element-wise division for CCS)
    Should probably not be used from outside
    """
compIM = np.asarray(compIM)
ccsnorm = np.asarray(ccsnorm)
ys = ccsnorm.shape[0]
xs = ccsnorm.shape[1]
# start with first column
ccsnorm[2::2, 0] = ccsnorm[1:ys - 1:2, 0]
# continue with middle columns
ccsnorm[:, 2::2] = ccsnorm[:, 1:xs - 1:2]
# finish with the last column if the width is even
if xs % 2 == 0:
    ccsnorm[2::2, xs - 1] = ccsnorm[1:ys - 1:2, xs - 1]
# avoid 0/0 by replacing zeros with the smallest positive float
ccsnorm[ccsnorm == 0] = np.nextafter(0., 1., dtype=ccsnorm.dtype)
res = compIM / ccsnorm
return res
<SYSTEM_TASK:>
Fit the function to a gaussian.
<END_TASK>
<USER_TASK:>
Description:
def gauss_fit(X, Y):
    """ Fit the function to a gaussian.

    Parameters
    ----------
    X: 1d array
        X values
    Y: 1d array
        Y values

    Returns
    -------
    (The return from scipy.optimize.curve_fit)
    popt : array
        Optimal values for the parameters
    pcov : 2d array
        The estimated covariance of popt.

    Notes
    -----
    /!\ This uses the slow curve_fit function! Do not use if you need speed!
    """
X = np.asarray(X) Y = np.asarray(Y) # Can not have negative values Y[Y < 0] = 0 # define gauss function def gauss(x, a, x0, sigma): return a * np.exp(-(x - x0)**2 / (2 * sigma**2)) # get first estimation for parameter mean = (X * Y).sum() / Y.sum() sigma = np.sqrt((Y * ((X - mean)**2)).sum() / Y.sum()) height = Y.max() # fit with curve_fit return curve_fit(gauss, X, Y, p0=[height, mean, sigma])
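A quick usage sketch (synthetic gaussian with a little noise; assumes numpy and the scipy.optimize.curve_fit import used above):

import numpy as np

X = np.linspace(-5, 5, 101)
Y = 2.0 * np.exp(-(X - 0.5) ** 2 / (2 * 1.2 ** 2))
Y += 0.01 * np.random.default_rng(2).standard_normal(X.size)
popt, pcov = gauss_fit(X, Y)
height, mean, sigma = popt  # expected near 2.0, 0.5 and 1.2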
<SYSTEM_TASK:>
Fit the log of the input to the log of a gaussian.
<END_TASK>
<USER_TASK:>
Description:
def gauss_fit_log(X, Y):
    """ Fit the log of the input to the log of a gaussian.

    Parameters
    ----------
    X: 1d array
        X values
    Y: 1d array
        Y values

    Returns
    -------
    mean: number
        The mean of the gaussian curve
    var: number
        The variance of the gaussian curve

    Notes
    -----
    The least squares method is used.
    As this is a log, make sure the amplitude is >> noise

    See the gausslog_sympy.py file for the explanation
    """
X = np.asarray(X) Y = np.asarray(Y) # take log data Data = np.log(Y) # Get Di and Xi D = [(Data * X**i).sum() for i in range(3)] X = [(X**i).sum() for i in range(5)] # compute numerator and denominator for mean and variance num = (D[0] * (X[1] * X[4] - X[2] * X[3]) + D[1] * (X[2]**2 - X[0] * X[4]) + D[2] * (X[0] * X[3] - X[1] * X[2])) den = 2 * (D[0] * (X[1] * X[3] - X[2]**2) + D[1] * (X[1] * X[2] - X[0] * X[3]) + D[2] * (X[0] * X[2] - X[1]**2)) varnum = (-X[0] * X[2] * X[4] + X[0] * X[3]**2 + X[1]**2 * X[4] - 2 * X[1] * X[2] * X[3] + X[2]**3) # if denominator is 0, can't do anything if abs(den) < 0.00001: # print('Warning: zero denominator!',den) return np.nan, np.nan # compute mean and variance mean = num / den var = varnum / den # if variance is negative, the data are not a gaussian if var < 0: # print('Warning: negative Variance!',var) return np.nan, np.nan return mean, var
<SYSTEM_TASK:>
Get center of mass
<END_TASK>
<USER_TASK:>
Description:
def center_of_mass(X, Y):
    """Get center of mass

    Parameters
    ----------
    X: 1d array
        X values
    Y: 1d array
        Y values

    Returns
    -------
    res: number
        The position of the center of mass in X

    Notes
    -----
    Computes the Y-weighted mean of X
    """
X = np.asarray(X) Y = np.asarray(Y) return (X * Y).sum() / Y.sum()
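For instance, on a symmetric peak (assumes numpy):

import numpy as np

X = np.arange(5)
Y = np.array([0., 1., 4., 1., 0.])
center_of_mass(X, Y)  # (0 + 1 + 8 + 3 + 0) / 6 = 2.0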
<SYSTEM_TASK:>
Get the peak position with subpixel precision
<END_TASK>
<USER_TASK:>
Description:
def get_peak_pos(im, wrap=False):
    """Get the peak position with subpixel precision

    Parameters
    ----------
    im: 1d array
        The signal containing a peak
    wrap: boolean, default False
        True if the signal represents a toric world (wraps around the borders)

    Returns
    -------
    ret: number
        The position of the highest peak with subpixel precision

    Notes
    -----
    This is a bit hacky and could be improved
    """
im = np.asarray(im)
# remove invalid values (assuming im>0)
im[np.logical_not(np.isfinite(im))] = 0
# remove mean
im = im - im.mean()
# get maximum value
argmax = im.argmax()
dsize = im.size

# get cut value (30% biggest peak)
# TODO: choose less random value
cut = .3 * im[argmax]
# isolate peak
peak = im > cut
peak, __ = label(peak)
# wrap border
if wrap and peak[0] != 0 and peak[-1] != 0 and peak[0] != peak[-1]:
    peak[peak == peak[-1]] = peak[0]
# extract peak
peak = peak == peak[argmax]
# get values along X and Y
X = np.arange(dsize)[peak]
Y = im[peak]

# wrap border
if wrap:
    # wrap X values
    X[X > dsize // 2] -= dsize

# remove argmax as in X**4 X should be small
offset = X[Y == Y.max()][0]
X -= offset

# We want to fit in a radius of 3 around the center
Y = Y[abs(X) < 3]
X = X[abs(X) < 3]

# if more than 2 pixels, use the log gaussian fit
if peak.sum() > 2:
    ret, __ = gauss_fit_log(X, Y)
    # if the fit fails, fall back on the center of mass
    if np.isnan(ret):
        ret = center_of_mass(X, Y)
elif peak.sum() > 1:
    # with only 2 pixels, gauss fitting is impossible; use center of mass
    ret = center_of_mass(X, Y)
else:
    # 1 px peak is easy
    ret = X[0]

"""
import matplotlib.pyplot as plt
plt.figure()
plt.plot(X,Y,'x',label='im')
plt.plot([ret,ret],[1,Y.max()],label='logfit')
plt.plot([X.min(),X.max()],[cut,cut])
plt.plot([X.min(),X.max()],[im.std(),im.std()])
#"""
return ret + offset
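A 1-D sanity check (synthetic signal; assumes numpy and the helpers above):

import numpy as np

x = np.arange(32, dtype=float)
sig = np.exp(-(x - 10.3) ** 2 / 4.0)
pos = get_peak_pos(sig)
# pos should land near 10.3 (subpixel, via the log-gaussian fit)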
<SYSTEM_TASK:> return centered squared magnitude <END_TASK> <USER_TASK:> Description: def centered_mag_sq_ccs(im): """return centered squared magnitude Parameters ---------- im: 2d array A CCS DFT image Returns ------- im: 2d array A centered image of the magnitude of the DFT Notes ----- Check doc Intel* Image Processing Library https://www.comp.nus.edu.sg/~cs4243/doc/ipl.pdf The center is at position (ys//2, 0) """
im = np.asarray(im)
# multiply image by image* to get squared magnitude
im = cv2.mulSpectrums(im, im, flags=0, conjB=True)

ys = im.shape[0]
xs = im.shape[1]
# get correct size return
ret = np.zeros((ys, xs // 2 + 1))

# first column:
# center
ret[ys // 2, 0] = im[0, 0]
# bottom
ret[ys // 2 + 1:, 0] = im[1:ys - 1:2, 0]
# top (Inverted copy bottom)
ret[ys // 2 - 1::-1, 0] = im[1::2, 0]

# center columns
ret[ys // 2:, 1:] = im[:(ys - 1) // 2 + 1, 1::2]
ret[:ys // 2, 1:] = im[(ys - 1) // 2 + 1:, 1::2]

# correct last column if the width is even
if xs % 2 == 0:
    ret[ys // 2 + 1:, xs // 2] = im[1:ys - 1:2, xs - 1]
    ret[:ys // 2, xs // 2] = 0

return ret
<SYSTEM_TASK:>
Simple test to check if image is overexposed
<END_TASK>
<USER_TASK:>
Description:
def is_overexposed(ims):
    """Simple test to check if image is overexposed

    Parameters
    ----------
    ims: 2d or 3d integer array
        The image, or a stack of images

    Returns
    -------
    overexposed: Bool (or list of Bool for a stack)
        Is the image overexposed
    """
if len(np.shape(ims)) == 3: return [is_overexposed(im) for im in ims] ims = np.array(ims, int) diffbincount = np.diff(np.bincount(np.ravel(ims))) overexposed = diffbincount[-1] > np.std(diffbincount) return overexposed
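A hedged sanity check (synthetic integer images; "expected" because the test is a heuristic on the histogram tail):

import numpy as np

rng = np.random.default_rng(3)
ok = (50 + 20 * rng.standard_normal((64, 64))).clip(0, None).astype(int)
clipped = np.minimum(ok, 80)  # simulate sensor saturation at value 80
is_overexposed(ok)       # expected False: the histogram tail decays smoothly
is_overexposed(clipped)  # expected True: saturation piles counts into the last bin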