Dataset columns: Unnamed: 0 (int64, values 0 to 389k); code (string, lengths 26 to 79.6k); docstring (string, lengths 1 to 46.9k).
13,900
def _clean_block(response_dict):
    # 'time' and 'received_time' are assumed key names: the datetime
    # fields of a blockcypher block response (original literals stripped).
    response_dict['time'] = parser.parse(response_dict['time'])
    response_dict['received_time'] = parser.parse(response_dict['received_time'])
    return response_dict
Pythonize a blockcypher API response
13,901
def get_locale_choices(locale_dir):
    file_name_s = os.listdir(locale_dir)
    choice_s = []
    for file_name in file_name_s:
        if file_name.endswith(I18n.TT_FILE_EXT_STXT):
            file_name_noext, _ = os.path.splitext(file_name)
            if file_name_noext:
                choice_s.append(file_name_noext)
    choice_s = sorted(choice_s)
    return choice_s
Get a list of locale file names in the given locale dir.
13,902
def itemComment(self, commentId):
    url = "%s/comments/%s" % (self.root, commentId)
    params = {
        "f": "json"
    }
    return self._get(url,
                     params,
                     securityHandler=self._securityHandler,
                     proxy_port=self._proxy_port,
                     proxy_url=self._proxy_url)
returns details of a single comment
13,903
def translate(self, content):
    v = content.value
    if v is None:
        return
    if isinstance(v, dict):
        cls = content.real.name
        content.value = Factory.object(cls, v)
        md = content.value.__metadata__
        md.sxtype = content.type
        return
    v = content.real.translate(v, False)
    content.value = v
    return self
Translate using the XSD type information. Python I{dict} is translated to a suds object. Most importantly, primitive values are translated from python types to XML types using the XSD type. @param content: The content to translate. @type content: L{Object} @return: self @rtype: L{Typed}
13,904
def analyze(self, handle, filename):
    handle.seek(0)
    try:
        # 'webids' is an assumed key: the submission-ID list in a
        # Joe Sandbox submit_sample() response.
        return self.jbx.submit_sample(handle)['webids'][0]
    except (jbxapi.JoeException, KeyError, IndexError) as e:
        raise sandboxapi.SandboxError("error in analyze: {e}".format(e=e))
Submit a file for analysis. :type handle: File handle :param handle: Handle to file to upload for analysis. :type filename: str :param filename: File name. :rtype: str :return: Task ID as a string
13,905
def AgregarBonificacionPenalizacion(self, codigo, detalle, resultado=None,
                                    porcentaje=None, importe=None, **kwargs):
    "Agrega la información referente a las bonificaciones o penalizaciones"
    ret = dict(codBonificacionPenalizacion=codigo, detalle=detalle,
               resultado=resultado, porcentajeAAplicar=porcentaje,
               importe=importe)
    # 'bonificacionPenalizacion' is an assumed key of the request dict.
    self.solicitud['bonificacionPenalizacion'].append(ret)
    return True
Adds the information regarding bonuses or penalties.
13,906
def read_dna(path):
    filename, ext = os.path.splitext(os.path.split(path)[-1])
    # Extension lists, format names, the error message and the unnamed-record
    # sentinel below are assumed values (original literals stripped).
    genbank_exts = ['.gb', '.ape']
    fasta_exts = ['.fasta', '.fa', '.fsa', '.seq']
    abi_exts = ['.abi', '.ab1']
    if any([ext == extension for extension in genbank_exts]):
        file_format = 'genbank'
    elif any([ext == extension for extension in fasta_exts]):
        file_format = 'fasta'
    elif any([ext == extension for extension in abi_exts]):
        file_format = 'abi'
    else:
        raise ValueError('File format not recognized.')
    seq = SeqIO.read(path, file_format)
    dna = coral.DNA(str(seq.seq))
    if seq.name == '.':
        dna.name = filename
    else:
        dna.name = seq.name
    for feature in seq.features:
        try:
            dna.features.append(_seqfeature_to_coral(feature))
        except FeatureNameError:
            pass
    dna.features = sorted(dna.features, key=lambda feature: feature.start)
    dna.circular = True
    return dna
Read DNA from file. Uses BioPython and coerces to coral format. :param path: Full path to input file. :type path: str :returns: DNA sequence. :rtype: coral.DNA
13,907
def update_extent_location(self, extent_loc):
    if not self._initialized:
        # Assumed message; the original literal was stripped.
        raise pycdlibexception.PyCdlibInternalError('Path Table Record not yet initialized')
    self.extent_location = extent_loc
A method to update the extent location for this Path Table Record. Parameters: extent_loc - The new extent location. Returns: Nothing.
13,908
def flip(a, axis):
    if not hasattr(a, 'ndim'):
        a = np.asarray(a)
    indexer = [slice(None)] * a.ndim
    try:
        indexer[axis] = slice(None, None, -1)
    except IndexError:
        raise ValueError('axis={0} is invalid for the {1}-dimensional input array'
                         .format(axis, a.ndim))
    return a[tuple(indexer)]
Reverse the order of elements in an array along the given axis. This function is a backport of `numpy.flip` introduced in NumPy 1.12. See Also -------- numpy.flip
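A quick sanity check of the backport against plain slicing; on NumPy >= 1.12 the same results come from numpy.flip directly:

import numpy as np

a = np.arange(6).reshape(2, 3)
assert (flip(a, 0) == a[::-1, :]).all()   # same as np.flip(a, 0)
assert (flip(a, 1) == a[:, ::-1]).all()   # same as np.flip(a, 1)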
13,909
def clear(self): self.prop_dt_map = dict() self.prop_data = dict() self.rev_lookup = defaultdict(set)
Convenience function to empty this fastrun container.
13,910
def delete(self, subject):
    session = subject.get_session(False)
    if session:
        session.remove_internal_attribute(self.dsc_ask)
        session.remove_internal_attribute(self.dsc_isk)
:type subject: subject_abcs.Subject
13,911
def check_directories(self):
    self.log.debug('Checking directories')
    if not os.path.exists(self._ve_dir):
        os.makedirs(self._ve_dir)
    if not os.path.exists(self._app_dir):
        os.makedirs(self._app_dir)
    if not os.path.exists(self._conf_dir):
        os.makedirs(self._conf_dir)
    if not os.path.exists(self._var_dir):
        os.makedirs(self._var_dir)
    if not os.path.exists(self._log_dir):
        os.makedirs(self._log_dir)
    if not os.path.exists(self._script_dir):
        os.makedirs(self._script_dir)
    # Paths and warning messages below are assumed values
    # (original literals stripped).
    uwsgi_params = '/etc/nginx/uwsgi_params'
    if os.path.exists(uwsgi_params):
        shutil.copy(uwsgi_params, self._conf_dir)
    else:
        logging.warning('uwsgi_params not found; copy it to {0} manually'.format(self._conf_dir))
    mime_types = '/etc/nginx/mime.types'
    if os.path.exists(mime_types):
        shutil.copy(mime_types, self._conf_dir)
        self._include_mimetypes = True
    else:
        logging.warn('mime.types not found; copy it to {0} manually'.format(self._conf_dir))
Creates base directories for app, virtualenv, and nginx
13,912
def clean_key(cls, key):
    # '#' is the replacement character named in the docstring.
    for var_re in cls.VARIABLE_RES:
        key = var_re.sub('#', key)
    return key
Replace things that look like variables with a '#' so tests aren't affected by random variables
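A minimal sketch of how this classmethod behaves, assuming VARIABLE_RES is a list of compiled patterns (the real patterns are defined elsewhere and are hypothetical here):

import re

class KeyCleaner:
    # Hypothetical patterns: hex addresses and ISO dates.
    VARIABLE_RES = [re.compile(r'0x[0-9a-f]+'), re.compile(r'\d{4}-\d{2}-\d{2}')]

    @classmethod
    def clean_key(cls, key):
        for var_re in cls.VARIABLE_RES:
            key = var_re.sub('#', key)
        return key

assert KeyCleaner.clean_key('obj-0xdeadbeef at 2021-01-01') == 'obj-# at #'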
13,913
def register_periodic_tasks(self, tasks: Iterable[Task]):
    for task in tasks:
        self._scheduler.enter(
            int(task.periodicity.total_seconds()),
            0,
            self._schedule_periodic_task,
            argument=(task,)
        )
Register tasks that need to be scheduled periodically.
13,914
def receive(self, decode=True):
    payload = self.socket.recv()
    payload = self.verify(payload)
    if decode:
        payload = self.decode(payload)
    return payload
Receive from socket, authenticate and decode payload
13,915
def hline_score(self, y, xmin, xmax):
    return self._hline_score[y, xmin, xmax]
Returns the number of unbroken paths of qubits >>> [(x,y,0,k) for x in range(xmin,xmax+1)] for :math:`k = 0,1,\cdots,L-1`. This is precomputed for speed.
13,916
def call_for_nodes(node, callback, recursive=False):
    result = callback(node)
    if recursive and not result:
        for child in get_child_nodes(node):
            call_for_nodes(child, callback, recursive)
If callback returns `True` the child nodes are skipped
13,917
def to_string(self, format_, fps=None, **kwargs):
    fp = io.StringIO()
    self.to_file(fp, format_, fps=fps, **kwargs)
    return fp.getvalue()
Get subtitle file as a string. See :meth:`SSAFile.save()` for full description. Returns: str
13,918
def lifting_condensation_level(T, RH):
    Tadj = T - 55.
    return cp / g * (Tadj - (1 / Tadj - log(RH) / 2840.) ** (-1))
Compute the Lifting Condensation Level (LCL) for a given temperature and relative humidity. Inputs: T is temperature in Kelvin; RH is relative humidity (dimensionless). Output: LCL in meters. This is the height (relative to parcel height) at which the parcel would become saturated during adiabatic ascent. Based on the approximate formula from Bolton (1980 MWR) as given by Romps (2017 JAS). For an exact formula see Romps (2017 JAS), doi:10.1175/JAS-D-17-0102.1
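A worked example under assumed constants (cp and g are module-level constants in the original; typical dry-air values are substituted here):

from numpy import log

cp = 1004.  # specific heat of dry air at constant pressure, J/kg/K (assumed)
g = 9.8     # gravitational acceleration, m/s^2 (assumed)

def lifting_condensation_level(T, RH):
    Tadj = T - 55.
    return cp / g * (Tadj - (1 / Tadj - log(RH) / 2840.) ** (-1))

# A parcel at 300 K and 50% relative humidity saturates roughly 1.4 km
# above its starting height, consistent with the ~125 m per degree of
# dewpoint depression rule of thumb.
print(round(lifting_condensation_level(300., 0.5)))  # -> ~1416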
13,919
def set_columns(self, columns):
    if isinstance(columns, tuple):
        self.columns = columns
    elif is_text_string(columns):
        # ',' is an assumed separator for comma-separated column values.
        self.columns = tuple(int(e) for e in columns.split(','))
    self.update()
Set edge line columns values.
13,920
def cmd():
    parser = argparse.ArgumentParser()
    in_group = parser.add_mutually_exclusive_group()
    # Argument names, flags and help strings are assumed values
    # (original literals stripped).
    in_group.add_argument('string', nargs='*', default=[],
                          help='String to titlecase')
    in_group.add_argument('-f', '--input-file',
                          help='File to read from to titlecase')
    parser.add_argument('-o', '--output-file',
                        help='File to write titlecased output to')
    args = parser.parse_args()
    if args.input_file is not None:
        if args.input_file == '-':
            ifile = sys.stdin
        else:
            ifile = open(args.input_file)
    else:
        ifile = sys.stdin
    if args.output_file is not None:
        if args.output_file == '-':
            ofile = sys.stdout
        else:
            ofile = open(args.output_file, 'w')
    else:
        ofile = sys.stdout
    if len(args.string) > 0:
        in_string = ' '.join(args.string)
    else:
        with ifile:
            in_string = ifile.read()
    with ofile:
        ofile.write(titlecase(in_string))
Handler for command line invocation
13,921
def get_provider_info(self, user_descriptor):
    route_values = {}
    if user_descriptor is not None:
        route_values['userDescriptor'] = self._serialize.url('user_descriptor', user_descriptor, 'str')
    # location_id is a placeholder; the original endpoint GUID was stripped.
    response = self._send(http_method='GET',
                          location_id='<graph-provider-info-location-guid>',
                          version='5.0-preview.1',
                          route_values=route_values)
    return self._deserialize('GraphProviderInfo', response)
GetProviderInfo. [Preview API] :param str user_descriptor: :rtype: :class:`<GraphProviderInfo> <azure.devops.v5_0.graph.models.GraphProviderInfo>`
13,922
def _num2deg(self, tile):
    n = 2.0 ** tile.zoom
    lon_deg = tile.x / n * 360.0 - 180.0
    lat_rad = mod_math.atan(mod_math.sinh(mod_math.pi * (1 - 2 * tile.y / n)))
    lat_deg = mod_math.degrees(lat_rad)
    return (lat_deg, lon_deg)
Taken from http://wiki.openstreetmap.org/wiki/Slippy_map_tilenames#Python
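Self-contained version for experimentation; Tile is a hypothetical stand-in for the tile object, which only needs x, y and zoom attributes:

import math as mod_math
from collections import namedtuple

Tile = namedtuple('Tile', 'x y zoom')  # hypothetical stand-in

def num2deg(tile):
    n = 2.0 ** tile.zoom
    lon_deg = tile.x / n * 360.0 - 180.0
    lat_rad = mod_math.atan(mod_math.sinh(mod_math.pi * (1 - 2 * tile.y / n)))
    return (mod_math.degrees(lat_rad), lon_deg)

# Tile (0, 0) at zoom 0 maps to the north-west corner of the world.
print(num2deg(Tile(0, 0, 0)))  # -> (85.0511..., -180.0)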
13,923
def delete(self, **kwargs):
    if self._dxid is not None:
        return dxpy.api.app_delete(self._dxid, **kwargs)
    else:
        # 'app-' is the name prefix DNAnexus uses for app IDs.
        return dxpy.api.app_delete('app-' + self._name, alias=self._alias, **kwargs)
Removes this app object from the platform. The current user must be a developer of the app.
13,924
def unset_key(dotenv_path, key_to_unset, quote_mode="always"):
    if not os.path.exists(dotenv_path):
        warnings.warn("Can't delete from %s - it doesn't exist." % dotenv_path)
        return None, key_to_unset
    removed = False
    with rewrite(dotenv_path) as (source, dest):
        for mapping in parse_stream(source):
            if mapping.key == key_to_unset:
                removed = True
            else:
                dest.write(mapping.original)
    if not removed:
        warnings.warn("key %s not removed from %s - key doesn't exist." % (key_to_unset, dotenv_path))
        return None, key_to_unset
    return removed, key_to_unset
Removes a given key from the given .env file. If the .env path given doesn't exist, fails. If the given key doesn't exist in the .env, fails.
13,925
def _extract_file(self, tgz, tarinfo, dst_path, buffer_size=10<<20):
    src = tgz.extractfile(tarinfo)
    dst = tf_v1.gfile.GFile(dst_path, "wb")
    while 1:
        buf = src.read(buffer_size)
        if not buf:
            break
        dst.write(buf)
        self._log_progress(len(buf))
    dst.close()
    src.close()
Extracts 'tarinfo' from 'tgz' and writes to 'dst_path'.
13,926
def remove_all_annotations_from_tier(self, id_tier, clean=True):
    for aid in self.tiers[id_tier][0]:
        del(self.annotations[aid])
    for aid in self.tiers[id_tier][1]:
        del(self.annotations[aid])
    self.tiers[id_tier][0].clear()
    self.tiers[id_tier][1].clear()
    if clean:
        self.clean_time_slots()
remove all annotations from a tier :param str id_tier: Name of the tier. :raises KeyError: If the tier is non existent.
13,927
def _get_odoo_version_info(addons_dir, odoo_version_override=None):
    odoo_version_info = None
    addons = os.listdir(addons_dir)
    for addon in addons:
        addon_dir = os.path.join(addons_dir, addon)
        if is_installable_addon(addon_dir):
            manifest = read_manifest(addon_dir)
            _, _, addon_odoo_version_info = _get_version(
                addon_dir, manifest, odoo_version_override, git_post_version=False)
            if odoo_version_info is not None and \
                    odoo_version_info != addon_odoo_version_info:
                raise DistutilsSetupError("Not all addons are for the same "
                                          "odoo version in %s (error detected "
                                          "in %s)" % (addons_dir, addon))
            odoo_version_info = addon_odoo_version_info
    return odoo_version_info
Detect Odoo version from an addons directory
13,928
def clear_cached_realms(self, realms, params=None):
    if realms in SKIP_IN_PATH:
        raise ValueError("Empty value passed for a required argument 'realms'.")
    return self.transport.perform_request(
        "POST",
        _make_path("_security", "realm", realms, "_clear_cache"),
        params=params,
    )
`<https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-clear-cache.html>`_ :arg realms: Comma-separated list of realms to clear :arg usernames: Comma-separated list of usernames to clear from the cache
13,929
def _get_transitions(self, probs, indexes, tree_idxs, batch_info, forward_steps=1,
                     discount_factor=1.0):
    if forward_steps > 1:
        transition_arrays = self.backend.get_transitions_forward_steps(
            indexes, forward_steps, discount_factor)
    else:
        transition_arrays = self.backend.get_transitions(indexes)
    # 'frame', 'weights' and 'tree_idxs' are assumed key names
    # (original literals stripped).
    priority_weight = self.priority_weight.value(batch_info['frame'])
    probs = probs / np.array([s.total() for s in self.backend.segment_trees],
                             dtype=float).reshape(1, -1)
    capacity = self.backend.current_size
    weights = (capacity * probs) ** (-priority_weight)
    weights = weights / weights.max(axis=0, keepdims=True)
    transition_arrays['weights'] = weights
    transition_tensors = {k: torch.from_numpy(v) for k, v in transition_arrays.items()}
    transitions = Trajectories(
        num_steps=indexes.shape[0],
        num_envs=indexes.shape[1],
        environment_information=None,
        transition_tensors=transition_tensors,
        rollout_tensors={},
        extra_data={
            'tree_idxs': tree_idxs
        }
    )
    return transitions.to_transitions()
Return batch of frames for given indexes
13,930
def content():
    message = m.Message()
    # Image paths, the style class and the translated string below are
    # assumed placeholders (original literals stripped).
    paragraph = m.Paragraph(
        m.Image('file:///%s/img/screenshots/field-mapping-tool-screenshot.png'
                % resources_path()),
        style_class='text-center'
    )
    message.add(paragraph)
    paragraph = m.Paragraph(tr(
        'This tool allows you to map fields in the attribute table to '
        'their InaSAFE equivalents.'),
        m.Image('file:///%s/img/icons/show-mapping-tool.svg' % resources_path(),
                **SMALL_ICON_STYLE),
    )
    message.add(paragraph)
    message.add(field_mapping_help_content())
    return message
Helper method that returns just the content. This method was added so that the text could be reused in the dock_help module. .. versionadded:: 4.1.0 :returns: A message object without brand element. :rtype: safe.messaging.message.Message
13,931
def setup():
    if None in [RTs._rt, RTs._rtp]:
        RTs._rt = RefactoringTool(myfixes)
        RTs._rtp = RefactoringTool(myfixes, {'print_function': True})
Call this before using the refactoring tools to create them on demand if needed.
13,932
def divmod_neg(a, b):
    q, r = divmod(a, b)
    sr = np.sign(r)
    if np.abs(r) > b / 2:
        q += sr
        r -= b * sr
    return q, r
Return divmod with closest result to zero
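The difference from plain divmod is that the remainder is pulled into (-b/2, b/2], which is handy when wrapping periodic offsets; assuming the function above with NumPy available:

print(divmod(7, 4))      # -> (1, 3)   standard remainder in [0, b)
print(divmod_neg(7, 4))  # -> (2, -1)  remainder closest to zero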
13,933
def _update_ret(ret, goids, go2color):
    # 'GOs' and 'go2color' are the key names given in the docstring.
    if goids:
        ret['GOs'].update(goids)
    if go2color:
        for goid, color in go2color.items():
            ret['go2color'][goid] = color
Update 'GOs' and 'go2color' in dict with goids and go2color.
13,934
def expand_env_variables(lines_enum):
    for line_number, line in lines_enum:
        for env_var, var_name in ENV_VAR_RE.findall(line):
            value = os.getenv(var_name)
            if not value:
                continue
            line = line.replace(env_var, value)
        yield line_number, line
Replace all environment variables that can be retrieved via `os.getenv`. The only allowed format for environment variables defined in the requirement file is `${MY_VARIABLE_1}` to ensure two things: 1. Strings that contain a `$` aren't accidentally (partially) expanded. 2. Ensure consistency across platforms for requirement files. These points are the result of a discussion on the `github pull request #3514 <https://github.com/pypa/pip/pull/3514>`_. Valid characters in variable names follow the `POSIX standard <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited to uppercase letters, digits and the `_` (underscore).
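A self-contained sketch of the expansion step; ENV_VAR_RE here is an approximation of pip's actual pattern, not the library's definition:

import os
import re

# Matches ${MY_VARIABLE_1} and captures both the full token and the bare name.
ENV_VAR_RE = re.compile(r'(\$\{([A-Z0-9_]+)\})')

os.environ['INDEX_HOST'] = 'pypi.example.org'
line = '--index-url https://${INDEX_HOST}/simple'
for env_var, var_name in ENV_VAR_RE.findall(line):
    line = line.replace(env_var, os.getenv(var_name))
print(line)  # -> --index-url https://pypi.example.org/simple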
13,935
def format_info_response(value):
    info = {}
    for line in value.decode().splitlines():
        # Redis INFO output: '#' prefixes section comments, entries are key:value.
        if not line or line[0] == '#':
            continue
        if ':' in line:
            key, value = line.split(':', 1)
            info[key] = parse_info_value(value)
    return info
Format the response from redis :param str value: The return response from redis :rtype: dict
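Feeding a captured INFO payload through the same parsing logic; parse_info_value is not shown above, so a minimal stand-in is used here:

def parse_info_value(value):
    # Stand-in: coerce numeric strings, leave everything else alone.
    try:
        return int(value)
    except ValueError:
        return value

raw = b"# Server\r\nredis_version:5.0.7\r\nconnected_clients:2\r\n"
info = {}
for line in raw.decode().splitlines():
    if not line or line[0] == '#':
        continue
    if ':' in line:
        key, value = line.split(':', 1)
        info[key] = parse_info_value(value)
print(info)  # -> {'redis_version': '5.0.7', 'connected_clients': 2}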
13,936
def _lazy_load_units_by_code():
    if UNITS_BY_CODE:
        return
    for unit in units.UNITS_BY_NAME.values():
        UNITS_BY_CODE[unit.code] = unit
Populate dict of units by code iff UNITS_BY_CODE is empty.
13,937
def block(self, userId, minute):
    desc = {
        "name": "CodeSuccessReslut",
        "desc": " http 成功返回结果",
        "fields": [{
            "name": "code",
            "type": "Integer",
            "desc": "返回码,200 为正常。"
        }, {
            "name": "errorMessage",
            "type": "String",
            "desc": "错误信息。"
        }]
    }
    # Method tuple and action path are assumed values for the RongCloud
    # user-block endpoint (original literals stripped).
    r = self.call_api(
        method=('API', 'POST', 'application/x-www-form-urlencoded'),
        action='/user/block.json',
        params={"userId": userId, "minute": minute})
    return Response(r, desc)
Block a user (rate-limited to 100 calls per second). @param userId: user ID (required) @param minute: block duration in minutes, maximum 43200 (required) @return code: return code, 200 means success. @return errorMessage: error message.
13,938
def remove_context(self, name):
    context = self.get_context(name)
    contexts = self.get_contexts()
    contexts.remove(context)
Remove a context from kubeconfig.
13,939
def explain(code, signals=SIGNALS):
    if code not in signals:
        raise NoSuchSignal(code)
    signal, action, description = signals[code]
    return {
        'code': code,
        'signal': signal,
        'action': action,
        'description': description,
    }
Explain what a given integer signal *code* does, including its signal name. :param code: An integer signal. :param signals: A database of signals.
13,940
def start_plugins(conf, watcher_plugin_class, health_plugin_class, sleep_time):
    watcher_plugin = watcher_plugin_class(conf)
    watcher_plugin.start()
    health_plugin = health_plugin_class(conf)
    health_plugin.start()
    return watcher_plugin, health_plugin
Start the working threads: - Health monitor (the health plugin) - Config change monitor (the watcher plugin)
13,941
def add_dictionary(self, dictionary):
    # Exception messages are assumed wordings (original literals stripped).
    if self.word_vectors is None:
        raise Exception('Model must be fit before adding a dictionary')
    if len(dictionary) > self.word_vectors.shape[0]:
        raise Exception('Dictionary length must be smaller '
                        'or equal to the number of word vectors')
    self.dictionary = dictionary
    if hasattr(self.dictionary, 'iteritems'):
        # Python 2 compatibility
        items_iterator = self.dictionary.iteritems()
    else:
        items_iterator = self.dictionary.items()
    self.inverse_dictionary = {v: k for k, v in items_iterator}
Supply a word-id dictionary to allow similarity queries.
13,942
def complete_tags(arg):
    # Search-option keys and values are assumed placeholders for the NIPAP
    # tag-search API (original literals stripped).
    search_string = '^'
    if arg is not None:
        search_string += arg
    res = Tag.search({
        'operator': 'regex_match',
        'val1': 'name',
        'val2': search_string
    })
    ret = []
    for t in res['result']:
        ret.append(t.name)
    return ret
Complete NIPAP tag name.
13,943
def import_settings(self, filename):
    if not os.path.isfile(filename):
        # Log level and message are assumed values (original literals stripped).
        self._logger.log(
            'error',
            'Settings file not found: {}'.format(filename)
        )
    else:
        with open(filename, 'r') as jsonFile:
            data = json.load(jsonFile)
            # JSON key names and the 'int' type tag are assumed values.
            self._value_ranges = data['valueRanges']
            self._best_values = data['bestValues']
            self._best_values = []
            for index, value in enumerate(data['bestValues']):
                if self._value_ranges[index] == 'int':
                    self._best_values.append(int(value))
                else:
                    self._best_values.append(float(value))
            self.minimize = data['minimize']
            self.num_employers = data['numEmployers']
            self._best_score = float(data['bestScore'])
            self.limit = data['limit']
Import settings from a JSON file Args: filename (string): name of the file to import from
13,944
def _multicomplex2(f, fx, x, h):
    n = len(x)
    ee = np.diag(h)
    hess = np.outer(h, h)
    cmplx_wrap = Bicomplex.__array_wrap__
    for i in range(n):
        for j in range(i, n):
            zph = Bicomplex(x + 1j * ee[i, :], ee[j, :])
            hess[i, j] = cmplx_wrap(f(zph)).imag12 / hess[j, i]
            hess[j, i] = hess[i, j]
    return hess
Calculate Hessian with Bicomplex-step derivative approximation
13,945
def run(command, num_retries=1, timeout=-1, **kwargs):
    last_error = None
    for _ in range(num_retries):
        try:
            process = Subprocess(command, **kwargs)
            return process.run(timeout)
        except Exception as err:
            last_error = err
    raise last_error
Run a command with optional timeout and retries. Provides a convenience method for executing a subprocess with additional error handling. Arguments: command (list of str): The command to execute. num_retries (int, optional): If the subprocess fails, the number of attempts to execute it before failing. timeout (float, optional): If positive, the number of seconds to wait for subprocess completion before failing. **kwargs: Additional args to pass to Subprocess.__init__() Returns: Tuple of (int, str, str): Where the variables represent (exit status, stdout, stderr). Raises: SubprocessError: If the command fails after the given number of retries.
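A usage sketch per the docstring's return contract; it assumes the module's Subprocess wrapper is importable alongside run():

status, out, err = run(['echo', 'hello'], num_retries=3, timeout=5.0)
assert status == 0 and 'hello' in out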
13,946
def _add_sequence(self, pdbID, chainID, sequence):
    pdbID = pdbID.upper()
    self[pdbID] = self.get(pdbID, {})
    self[pdbID][chainID] = sequence
    self.sequences.append((pdbID, chainID, sequence))
    if not self.unique_sequences.get(sequence):
        self.unique_sequences[sequence] = visible_colors[len(self.unique_sequences) % len(visible_colors)]
    self.identical_sequences = None
This is a 'private' function. If you call it directly, call _find_identical_sequences() afterwards to update identical_sequences.
13,947
def delay(self, n, start_time):
    if (n > self.max_retries or
            (n > self.min_retries and
             time.time() - start_time > self.max_retry_period)):
        return -1
    return min(
        math.pow(self.backoff_factor, n - 1) * self.initial_delay,
        self.max_delay)
Calculate delay before the next retry. Args: n: the number of current attempt. The first attempt should be 1. start_time: the time when retry started in unix time. Returns: Number of seconds to wait before next retry. -1 if retry should give up.
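The retry schedule this produces is easiest to see with concrete settings (values assumed for illustration):

import math

initial_delay, backoff_factor, max_delay = 1.0, 2.0, 30.0
# Successive attempts wait 1, 2, 4, 8, 16, 30, 30, ... seconds, i.e.
# exponential backoff capped at max_delay, until max_retries or
# max_retry_period makes delay() return -1.
for n in range(1, 8):
    print(min(math.pow(backoff_factor, n - 1) * initial_delay, max_delay))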
13,948
def print_factoids(input_dict, environment_dict):
    dict_mark = input_dict
    try:
        # Dictionary key names are assumed placeholders for the command tree
        # produced by seash_dictionary.parse_command(); 'all' is named in the
        # usage string. This module is Python 2 code.
        command = dict_mark.keys()[0]
        while dict_mark[command]['name'] != 'args':
            dict_mark = dict_mark[command]['children']
            command = dict_mark.keys()[0]
        args = command
    except IndexError:
        raise seash_exceptions.UserError("\nError, Syntax of the command is: show factoids [number of factoids]/all \n")
    if args == 'all':
        print
        for factoid in factoids:
            print factoid
        print
        return
    try:
        no_of_factoids = int(args)
    except ValueError:
        raise seash_exceptions.UserError("\nYou have to enter number only.\n")
    if (no_of_factoids > (len(factoids))):
        print "\nWe have only %d factoids. Here is the list of factoids:" % (len(factoids))
        no_of_factoids = len(factoids)
    elif (no_of_factoids <= 0):
        raise seash_exceptions.UserError("\nYou have to enter positive number only.\n")
    random.shuffle(factoids)
    for factoid in factoids[:no_of_factoids]:
        print factoid
    print
<Purpose> Used to print seash factoids when user uses 'show factoids' command. <Arguments> input_dict: Input dictionary generated by seash_dictionary.parse_command(). environment_dict: Dictionary describing the current seash environment. For more information, see command_callbacks.py's module docstring. <Side Effects> Prints factoids onto the screen. <Exceptions> UserError: If user does not type appropriate command. ValueError: If user does not provide valid input (integer). <Return> None
13,949
def include_revision(revision_num, skip_factor=1.1):
    if skip_factor <= 1.0:
        return True
    return (int(math.log1p(revision_num) / math.log(skip_factor)) !=
            int(math.log(revision_num + 2.0) / math.log(skip_factor)))
Decide whether to include a revision. If the number of revisions is large, we exclude some revisions to avoid a quadratic blowup in runtime, since the article is likely also large. We make the ratio between consecutive included revision numbers approximately equal to "factor". Args: revision_num: an integer skip_factor: a floating point number >= 1.0 Returns: a boolean
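For intuition, the kept revision numbers are roughly log-spaced; with the function above (and math imported) at skip_factor=1.5:

kept = [r for r in range(100) if include_revision(r, skip_factor=1.5)]
print(kept)  # -> [0, 1, 2, 4, 6, 10, 16, 24, 37, 56, 85]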
13,950
def get_scheme(self):
    scheme = Scheme("Github Repository Forks")
    scheme.description = "Streams events giving the number of forks of a GitHub repository."
    scheme.use_external_validation = True
    scheme.use_single_instance = True
    owner_argument = Argument("owner")
    owner_argument.title = "Owner"
    owner_argument.data_type = Argument.data_type_string
    owner_argument.description = "Github user or organization that created the repository."
    owner_argument.required_on_create = True
    scheme.add_argument(owner_argument)
    repo_name_argument = Argument("repo_name")
    repo_name_argument.title = "Repo Name"
    repo_name_argument.data_type = Argument.data_type_string
    repo_name_argument.description = "Name of the Github repository."
    repo_name_argument.required_on_create = True
    scheme.add_argument(repo_name_argument)
    return scheme
When Splunk starts, it looks for all the modular inputs defined by its configuration, and tries to run them with the argument --scheme. Splunkd expects the modular inputs to print a description of the input in XML on stdout. The modular input framework takes care of all the details of formatting XML and printing it. The user need only override get_scheme and return a new Scheme object. :return: scheme, a Scheme object
13,951
def exists(self, relpath):
    if self.isignored(self._append_slash_if_dir_path(relpath)):
        return False
    return self._exists_raw(relpath)
Returns True if path exists and is not ignored.
13,952
def get_option_help_info(self, args, kwargs):
    display_args = []
    scoped_cmd_line_args = []
    unscoped_cmd_line_args = []
    for arg in args:
        is_short_arg = len(arg) == 2
        unscoped_cmd_line_args.append(arg)
        if self._scope_prefix:
            # Format strings below are assumed values (original literals stripped).
            scoped_arg = '--{}-{}'.format(self._scope_prefix, arg.lstrip('-'))
        else:
            scoped_arg = arg
        scoped_cmd_line_args.append(scoped_arg)
        if kwargs.get('type') == bool:
            if is_short_arg:
                display_args.append(scoped_arg)
            else:
                unscoped_cmd_line_args.append('--no-{}'.format(arg[2:]))
                scoped_cmd_line_args.append('--no-{}'.format(scoped_arg[2:]))
                display_args.append('--[no-]{}'.format(scoped_arg[2:]))
        else:
            metavar = self.compute_metavar(kwargs)
            display_arg = '{}={}'.format(scoped_arg, metavar)
            if is_list_option(kwargs):
                display_args.append('{arg_str} ({arg_str}) ...'.format(arg_str=display_arg))
            else:
                display_args.append(display_arg)
    # The snippet is truncated here: the computation of typ, default, help_msg,
    # the deprecation fields and choices is missing, as is the name of the
    # class being constructed (OptionHelpInfo is assumed).
    ret = OptionHelpInfo(
        display_args=display_args,
        scoped_cmd_line_args=scoped_cmd_line_args,
        unscoped_cmd_line_args=unscoped_cmd_line_args,
        typ=typ,
        default=default,
        help=help_msg,
        deprecated_message=deprecated_message,
        removal_version=removal_version,
        removal_hint=removal_hint,
        choices=choices)
    return ret
Returns an OptionHelpInfo for the option registered with the given (args, kwargs).
13,953
def main(args=None):
    # Action names and other literals below are assumed placeholders
    # (original literals stripped).
    if args is None:
        args = parse_args(sys.argv[1:])
    if args.action == 'example-config':
        conf, doc = Config.example_config()
        print(conf)
        sys.stderr.write(doc + "\n")
        return
    if args.verbose > 1:
        set_log_debug()
    elif args.verbose == 1:
        set_log_info()
    config = Config(args.config)
    if args.action == 'logs':
        aws = AWSInfo(config)
        aws.show_cloudwatch_logs(count=args.log_count)
        return
    if args.action == 'apilogs':
        api_id = get_api_id(config, args)
        aws = AWSInfo(config)
        aws.show_cloudwatch_logs(
            count=args.log_count,
            grp_name='API-Gateway-Execution-Logs_%s/%s' % (
                api_id, config.stage_name
            )
        )
        return
    if args.action == 'queuepeek':
        aws = AWSInfo(config)
        aws.show_queue(name=args.queue_name, delete=args.queue_delete,
                       count=args.msg_count)
        return
    if args.action == 'test':
        run_test(config, args)
        return
    if args.action in ['genapply', 'apply', 'plan', 'destroy']:
        runner = TerraformRunner(config, args.tf_path)
        tf_ver = runner.tf_version
    else:
        tf_ver = tuple(
            [int(x) for x in args.tf_ver.split('.')]
        )
    if args.action == 'generate' or args.action == 'genapply':
        func_gen = LambdaFuncGenerator(config)
        func_src = func_gen.generate()
        tf_gen = TerraformGenerator(config, tf_ver=tf_ver)
        tf_gen.generate(func_src)
        if args.action == 'generate':
            return
    if args.action == 'apply' or args.action == 'genapply':
        runner.apply(args.stream_tf)
        if config.get('api_gateway_method_settings') is not None:
            aws = AWSInfo(config)
            aws.set_method_settings()
    elif args.action == 'plan':
        runner.plan(args.stream_tf)
    else:
        runner.destroy(args.stream_tf)
Main entry point
13,954
def delete_entry_tag(sender, instance, **kwargs):
    from ..models import (
        Entry,
        EntryTag
    )
    entry = Entry.objects.get_for_model(instance.content_object)[0]
    tag = instance.tag
    EntryTag.objects.filter(tag=tag, entry=entry).delete()
Deletes EntryTag for Entry corresponding to specified TaggedItemBase instance. :param sender: the sending TaggedItemBase class. :param instance: the TaggedItemBase instance.
13,955
def resolve_sid(sid):
    return d1_gmn.app.models.Chain.objects.get(sid__did=sid).head_pid.did
Get the PID to which the ``sid`` currently maps. Preconditions: - ``sid`` is verified to exist. E.g., with d1_gmn.app.views.asserts.is_sid().
13,956
def iter_events(self, number=-1):
    # 'events' is the endpoint name implied by the docstring.
    url = self._build_url('events', base_url=self._api)
    return self._iter(int(number), url, IssueEvent)
Iterate over events associated with this issue only. :param int number: (optional), number of events to return. Default: -1 returns all events available. :returns: generator of :class:`IssueEvent <github3.issues.event.IssueEvent>`\ s
13,957
def _svgcolor(color):
    # Format string per the docstring's "SVG rgb() value".
    return 'rgb(%d,%d,%d)' % tuple([int(255 * x) for x in [color.r, color.g, color.b]])
Convert a PyChart color object to an SVG rgb() value. See color.py.
13,958
def get_default_config_help(self):
    config = super(StatsiteHandler, self).get_default_config_help()
    # Option names and help text are assumed placeholders
    # (original literals stripped).
    config.update({
        'host': 'Statsite host',
        'tcpport': 'Statsite TCP port',
        'udpport': 'Statsite UDP port',
        'timeout': 'Connection timeout',
    })
    return config
Returns the help text for the configuration options for this handler
13,959
def initauth(self):
    # The User-agent prefix is an assumed value (original literal stripped).
    headers = {'User-agent': 'CLAMPythonClient/' + clam.common.data.VERSION}
    if self.oauth:
        if not self.oauth_access_token:
            r = requests.get(self.url, headers=headers, verify=self.verify)
            if r.status_code == 404:
                raise clam.common.data.NotFound("Authorization provider not found")
            elif r.status_code == 403:
                raise clam.common.data.PermissionDenied("Authorization provider denies access")
            elif not (r.status_code >= 200 and r.status_code <= 299):
                raise Exception("An error occurred, return code " + str(r.status_code))
            data = self._parse(r.text)
            if data is True:
                raise Exception("No access token provided, but Authorization Provider requires manual user input. Unable to authenticate automatically. Obtain an access token from " + r.geturl())
            else:
                self.oauth_access_token = data.oauth_access_token
        headers['Authorization'] = 'Bearer ' + self.oauth_access_token
    return headers
Initialise authentication, for internal use
13,960
def parse_expression(self, expr):
    # The original regex literal was stripped; this hypothetical pattern
    # splits a namespace prefix from the final member, per the examples
    # in the docstring.
    m = re.match(r'^(.*::)?(.*?)$', expr)
    prefix = m.group(1)
    tail = m.group(2)
    return [prefix, tail]
split expression into prefix and expression tested with ``` operator== != std::rel_ops::operator!= std::atomic::operator= std::array::operator[] std::function::operator() std::vector::at std::relational operators std::vector::begin std::abs(float) std::fabs() ```
13,961
def filter_query(self, query, filter_info, model):
    if filter_info:
        filters = create_filters(model, filter_info, self.resource)
        query = query.filter(*filters)
    return query
Filter query according to jsonapi 1.0 :param Query query: sqlalchemy query to sort :param filter_info: filter information :type filter_info: dict or None :param DeclarativeMeta model: an sqlalchemy model :return Query: the sorted query
13,962
def _rc_dbsize(self):
    "Returns the number of keys in the current database"
    result = 0
    # 'master' is an assumed substring used to count keys on master
    # nodes only (original literal stripped).
    for alias, redisent in iteritems(self.redises):
        if alias.find('master') == -1:
            continue
        result += redisent.dbsize()
    return result
Returns the number of keys in the current database
13,963
def _create_element_list_(self):
    element_set = stoich.elements(self.compounds)
    return sorted(list(element_set))
Extract an alphabetically sorted list of elements from the compounds of the material. :returns: An alphabetically sorted list of elements.
13,964
def es_indexers(cls, base_class=None, role='rdf_class', **kwargs):
    # Key names and the role values are assumed placeholders consistent with
    # the docstring ('es_Nested' or 'rdf_class'); original literals stripped.
    def _prop_filter(prop, value, **kwargs):
        try:
            use_prop = len(set(value.owl_inverseOf) - parent_props) > 0
        except AttributeError:
            use_prop = True
        if prop in nested_props and use_prop:
            return True
        return False

    if not base_class:
        base_class = cls
    rtn_list = []
    if kwargs.get("depth"):
        kwargs['depth'] += 1
        initial = False
    else:
        initial = True
        kwargs['depth'] = 1
        kwargs['class'] = cls.__name__
        kwargs['class_obj'] = cls
    if kwargs.get('class_obj'):
        parent_props = set(cls.properties)
    else:
        parent_props = set()
    if role == 'rdf_class':
        for value in cls.properties.values():
            rtn_list += value.es_indexers(base_class, **kwargs)
    elif role == 'es_Nested':
        if cls == base_class:
            nested_props = LABEL_FIELDS
        else:
            nested_props = cls.es_defs.get('kds_esNestedProps',
                                           list(cls.properties.keys()))
        used_props = [value for prop, value in cls.properties.items()
                      if _prop_filter(prop, value, **kwargs)]
        for value in cls.properties.values():
            rtn_list += value.es_indexers(base_class, **kwargs)
    if cls.es_defs.get('kds_esIndex', [None])[0]:
        rtn_list += [cls]
    return list(set(rtn_list))
Returns the es mapping for the class args: ----- base_class: The root class being indexed role: the role states how the class should be mapped depending upon whether it is used as a subject or an object. options are es_Nested or rdf_class
13,965
async def del_alternative(self, alt, timeout=OTGW_DEFAULT_TIMEOUT):
    cmd = OTGW_CMD_DEL_ALT
    alt = int(alt)
    if alt < 1 or alt > 255:
        return None
    ret = await self._wait_for_cmd(cmd, alt, timeout)
    if ret is not None:
        return int(ret)
Remove the specified Data-ID from the list of alternative commands. Only one occurrence is deleted. If the Data-ID appears multiple times in the list of alternative commands, this command must be repeated to delete all occurrences. The table of alternative Data-IDs is stored in non-volatile memory so it will persist even if the gateway has been powered off. Data-ID values from 1 to 255 are allowed. Return the ID that was removed from the list, or None on failure. This method is a coroutine
13,966
def _next_radiotap_extpm(pkt, lst, cur, s):
    if cur is None or (cur.present and cur.present.Ext):
        st = len(lst) + (cur is not None)
        return lambda *args: RadioTapExtendedPresenceMask(*args, index=st)
    return None
Generates the next RadioTapExtendedPresenceMask
13,967
def all_tensorboard_jobs(self):
    from db.models.tensorboards import TensorboardJob
    return TensorboardJob.all.filter(project=self)
Similar to tensorboard_jobs, but uses the default manager to return archived experiments as well.
13,968
def nest(*content):
    if len(content) == 0:
        # Assumed message (original literal stripped).
        raise ValueError('cannot nest an empty list')
    return And([LPF, content[0]] +
               list(itt.chain.from_iterable(zip(itt.repeat(C), content[1:]))) +
               [RPF])
Define a delimited list by enumerating each element of the list.
13,969
def list_qos_policies(self, retrieve_all=True, **_params):
    # 'policies' is the assumed collection key in the API response.
    return self.list('policies', self.qos_policies_path, retrieve_all, **_params)
Fetches a list of all qos policies for a project.
13,970
def reload_components_ui(self):
    selected_components = self.get_selected_components()
    self.__engine.start_processing("Reloading Components ...", len(selected_components))
    reload_failed_components = []
    for component in selected_components:
        if component.interface.deactivatable:
            success = self.reload_component(component.name) or False
            if not success:
                reload_failed_components.append(component)
        else:
            self.__engine.notifications_manager.warnify(
                "{0} | '{1}' Component cannot be deactivated and won't be reloaded!".format(
                    self.__class__.__name__, component.name))
    # The tail of this method is truncated in the snippet; a failure
    # notification along these lines is the likely conclusion.
    self.__engine.stop_processing()
    if reload_failed_components:
        self.__engine.notifications_manager.warnify(
            "{0} | Failed reloading '{1}' Component(s)!".format(
                self.__class__.__name__,
                ", ".join(reload_failed_component.name
                          for reload_failed_component in reload_failed_components)))
        return False
    return True
Reloads user selected Components. :return: Method success. :rtype: bool :note: May require user interaction.
13,971
def returner(ret):
    _options = _get_options(ret)
    if not _verify_options(_options):
        return
    # 'level', 'facility', 'options' and 'tag' are assumed option keys
    # (original literals stripped).
    level = getattr(syslog, _options['level'])
    facility = getattr(syslog, _options['facility'])
    logoption = 0
    for opt in _options['options']:
        logoption = logoption | getattr(syslog, opt)
    if 'tag' in _options:
        syslog.openlog(ident=salt.utils.stringutils.to_str(_options['tag']),
                       logoption=logoption)
    else:
        syslog.openlog(logoption=logoption)
    syslog.syslog(facility | level, salt.utils.json.dumps(ret))
    syslog.closelog()
Return data to the local syslog
13,972
def _dispatch(self, typ):
    for can_handle, handler in self._handler_pairs:
        try:
            if can_handle(typ):
                return handler
        except Exception:
            pass
    raise KeyError("unable to find handler for {0}".format(typ))
Returns the appropriate handler for the object passed.
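A minimal self-contained sketch of the predicate-dispatch pattern the method implements (the Registry class here is hypothetical, not the library's API):

class Registry:
    def __init__(self):
        self._handler_pairs = []

    def register(self, can_handle, handler):
        # Newest registrations take precedence.
        self._handler_pairs.insert(0, (can_handle, handler))

    def dispatch(self, typ):
        for can_handle, handler in self._handler_pairs:
            try:
                if can_handle(typ):
                    return handler
            except Exception:
                pass
        raise KeyError("unable to find handler for {0}".format(typ))

reg = Registry()
reg.register(lambda t: issubclass(t, int), lambda v: v + 1)
print(reg.dispatch(bool)(1))  # bool is a subclass of int -> 2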
13,973
def generate(env):
    SCons.Tool.cc.generate(env)
    env['CC'] = env.Detect(compilers) or 'clang'
    # Platform names, flags and the version regex are assumed values
    # (original literals stripped).
    if env['PLATFORM'] in ['cygwin', 'win32']:
        env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
    else:
        env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC')
    if env['CC']:
        pipe = SCons.Action._subproc(env, [env['CC'], '--version'],
                                     stdin='devnull',
                                     stderr='devnull',
                                     stdout=subprocess.PIPE)
        if pipe.wait() != 0:
            return
        line = pipe.stdout.readline()
        if sys.version_info[0] > 2:
            line = line.decode()
        match = re.search(r'clang +version +([0-9]+(?:\.[0-9]+)+)', line)
        if match:
            env['CCVERSION'] = match.group(1)
Add Builders and construction variables for clang to an Environment.
13,974
def switch_showfilter_icon(self, toggled):
    at = QtCore.Qt.DownArrow if toggled else QtCore.Qt.RightArrow
    self.showfilter_tb.setArrowType(at)
Switch the icon on the showfilter_tb :param toggled: the state of the button :type toggled: :class:`bool` :returns: None :rtype: None :raises: None
13,975
def get_model(model_id):
    assert_is_type(model_id, str)
    model_json = api("GET /3/Models/%s" % model_id)["models"][0]
    algo = model_json["algo"]
    if algo == "svd":
        m = H2OSVD()
    elif algo == "pca":
        m = H2OPrincipalComponentAnalysisEstimator()
    elif algo == "drf":
        m = H2ORandomForestEstimator()
    elif algo == "naivebayes":
        m = H2ONaiveBayesEstimator()
    elif algo == "kmeans":
        m = H2OKMeansEstimator()
    elif algo == "glrm":
        m = H2OGeneralizedLowRankEstimator()
    elif algo == "glm":
        m = H2OGeneralizedLinearEstimator()
    elif algo == "gbm":
        m = H2OGradientBoostingEstimator()
    elif algo == "deepwater":
        m = H2ODeepWaterEstimator()
    elif algo == "xgboost":
        m = H2OXGBoostEstimator()
    elif algo == "word2vec":
        m = H2OWord2vecEstimator()
    elif algo == "generic":
        m = H2OGenericEstimator()
    elif algo == "deeplearning":
        if model_json["output"]["model_category"] == "AutoEncoder":
            m = H2OAutoEncoderEstimator()
        else:
            m = H2ODeepLearningEstimator()
    elif algo == "stackedensemble":
        m = H2OStackedEnsembleEstimator()
    elif algo == "isolationforest":
        m = H2OIsolationForestEstimator()
    else:
        raise ValueError("Unknown algo type: " + algo)
    m._resolve_model(model_id, model_json)
    return m
Load a model from the server. :param model_id: The model identification in H2O :returns: Model object, a subclass of H2OEstimator
13,976
def _add_to_upload_queue(self, src, rfile, uid):
    ud = blobxfer.models.upload.Descriptor(
        src, rfile, uid, self._spec.options, self._general_options, self._resume)
    if ud.entity.is_encrypted:
        with self._upload_lock:
            self._ud_map[uid] = ud
    self._upload_queue.put(ud)
    if self._upload_start_time is None:
        with self._upload_lock:
            if self._upload_start_time is None:
                self._upload_start_time = blobxfer.util.datetime_now()
Add remote file to upload queue :param Uploader self: this :param blobxfer.models.upload.LocalPath src: local path :param blobxfer.models.azure.StorageEntity rfile: remote file :param str uid: unique id
13,977
def query_feature():
    # Allowed argument names are assumed from the documented query
    # parameters in the docstring.
    args = get_args(
        request_args=request.args,
        allowed_str_args=['type_', 'identifier', 'description', 'entry_name'],
        allowed_int_args=['limit']
    )
    return jsonify(query.feature(**args))
Returns list of sequence feature by query parameters --- tags: - Query functions parameters: - name: type_ in: query type: string required: false description: Feature type default: 'splice variant' - name: identifier in: query type: string required: false description: Feature identifier default: VSP_045447 - name: description in: query type: string required: false description: Feature description default: 'In isoform 11.' - name: entry_name in: query type: string required: false description: UniProt entry name default: A4_HUMAN - name: limit in: query type: integer required: false description: limit of results numbers default: 10
13,978
def patch_namespaced_stateful_set_status(self, name, namespace, body, **kwargs):
    # Standard generated-client pattern: '_return_http_data_only' and
    # 'async_req' are the kwargs named in the docstring.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.patch_namespaced_stateful_set_status_with_http_info(name, namespace, body, **kwargs)
    else:
        (data) = self.patch_namespaced_stateful_set_status_with_http_info(name, namespace, body, **kwargs)
        return data
partially update status of the specified StatefulSet This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_stateful_set_status(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the StatefulSet (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch). :param bool force: Force is going to \"force\" Apply requests. It means user will re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply patch requests. :return: V1StatefulSet If the method is called asynchronously, returns the request thread.
13,979
def dump_t_coords(dataset_dir, data_dir, dataset, root=None, compress=True):
    if root is None:
        root = {}
    tcoords = dataset.GetPointData().GetTCoords()
    if tcoords:
        dumped_array = dump_data_array(dataset_dir, data_dir, tcoords, {}, compress)
        # 'pointData', 'activeTCoords', 'arrays' and 'data' are assumed keys
        # of the vtkjs structure (original literals stripped).
        root['pointData']['activeTCoords'] = len(root['pointData']['arrays'])
        root['pointData']['arrays'].append({'data': dumped_array})
dump vtkjs texture coordinates
13,980
def write(self, text):
    sys.stdout.write("\r")
    self._clear_line()
    _text = to_unicode(text)
    if PY2:
        _text = _text.encode(ENCODING)
    assert isinstance(_text, builtin_str)
    sys.stdout.write("{0}\n".format(_text))
Write text in the terminal without breaking the spinner.
13,981
def reliability_curve(self):
    total = self.frequencies["Total_Freq"].sum()
    curve = pd.DataFrame(columns=["Bin_Start", "Bin_End", "Bin_Center",
                                  "Positive_Relative_Freq", "Total_Relative_Freq"])
    curve["Bin_Start"] = self.thresholds[:-1]
    curve["Bin_End"] = self.thresholds[1:]
    curve["Bin_Center"] = 0.5 * (self.thresholds[:-1] + self.thresholds[1:])
    curve["Positive_Relative_Freq"] = self.frequencies["Positive_Freq"] / self.frequencies["Total_Freq"]
    curve["Total_Relative_Freq"] = self.frequencies["Total_Freq"] / total
    return curve
Calculates the reliability diagram statistics. The key columns are Bin_Start and Positive_Relative_Freq Returns: pandas.DataFrame
13,982
def store_async_result(async_id, async_result):
    logging.debug("Storing result for %s", async_id)
    key = FuriousAsyncMarker(
        id=async_id,
        result=json.dumps(async_result.to_dict()),
        status=async_result.status).put()
    logging.debug("Setting Async result %s using marker: %s.", async_result, key)
Persist the Async's result to the datastore.
13,983
def add_bits4subtree_ids(self, relevant_ids):
    if relevant_ids:
        checking = True
    else:
        checking = False
        relevant_ids = {}
    bit = 1
    self.bits2internal_node = {}
    for node in self.postorder_node_iter():
        p = node._parent
        if p is None:
            if not node.is_leaf:
                self.bits2internal_node[node.bits4subtree_ids] = node
            continue
        if not hasattr(p, 'bits4subtree_ids'):
            p.bits4subtree_ids = 0
        i = node._id
        if checking:
            b = relevant_ids.get(i)
            if b:
                if node.is_leaf:
                    node.bits4subtree_ids = b
                else:
                    node.bits4subtree_ids |= b
        else:
            if node.is_leaf:
                relevant_ids[i] = bit
                node.bits4subtree_ids = bit
                bit <<= 1
        if not node.is_leaf:
            self.bits2internal_node[node.bits4subtree_ids] = node
        p.bits4subtree_ids |= node.bits4subtree_ids
    return relevant_ids
Adds a long integer bits4subtree_ids to each node (Fails cryptically if that field is already present!) relevant_ids can be a dict of _id to bit representation. If it is not supplied, a dict will be created by registering the leaf._id into a dict (and returning the dict) the bits4subtree_ids will have a 1 bit if the _id is at or descended from this node and 0 if it is not in this subtree. Returns the dict of ids -> longs Also creates a dict of long -> node mappings for all internal nodes. Stores this in self as bits2internal_node
13,984
def check_and_adjust_sighandler(self, signame, sigs):
    signum = lookup_signum(signame)
    try:
        old_handler = signal.getsignal(signum)
    except ValueError:
        sigs[signame].old_handler = old_handler
        pass
    try:
        self._orig_set_signal(signum, self.sigs[signame].handle)
    except ValueError:
        return False
    except KeyError:
        return False
    pass
    return True
Check to see if a single signal handler that we are interested in has changed or has not been set initially. On return self.sigs[signame] should have our signal handler. True is returned if the same or adjusted, False or None if error or not found.
13,985
def tag(self, name, user, revision=None, message=None, date=None, **kwargs):
    if name in self.tags:
        raise TagAlreadyExistError("Tag %s already exists" % name)
    changeset = self.get_changeset(revision)
    message = message or "Added tag %s for commit %s" % (name, changeset.raw_id)
    self._repo.refs["refs/tags/%s" % name] = changeset._commit.id
    self._parsed_refs = self._get_parsed_refs()
    self.tags = self._get_tags()
    return changeset
Creates and returns a tag for the given ``revision``. :param name: name for new tag :param user: full username, i.e.: "Joe Doe <[email protected]>" :param revision: changeset id for which new tag would be created :param message: message of the tag's commit :param date: date of tag's commit :raises TagAlreadyExistError: if tag with same name already exists
13,986
def repl():
    while True:
        try:
            sys.stdout.write("Type in next query: \n> ")
            import locale
            query_str = raw_input().decode(sys.stdin.encoding or
                                           locale.getpreferredencoding(True))
        except KeyboardInterrupt:
            break
        # 'quit' is the exit word named in the docstring.
        if u'quit' in query_str:
            break
        print_query_and_parse_tree(query_str)
Read-Eval-Print-Loop for reading the query, printing it and its parse tree. Exit the loop either with an interrupt or "quit".
13,987
def delete(self, group_id, session):
    # Request name and field keys are assumed values for the TOP API
    # (original literals stripped); the method name comes from the docstring.
    request = TOPRequest('taobao.crm.group.delete')
    request['group_id'] = group_id
    self.create(self.execute(request, session), fields=['is_success', ])
    return self.is_success
taobao.crm.group.delete: delete a group. Removes all members from the group and then deletes the group itself. Note: group deletion is an asynchronous task; call taobao.crm.grouptask.check first to make sure there are no pending tasks on the affected attributes.
13,988
def get_config(key):
    # The config-key prefix is an assumed placeholder (original literal stripped).
    key = 'AVATAR_{0}'.format(key.upper())
    local_config = current_app.config.get(key)
    return local_config or getattr(theme.current, key, DEFAULTS[key])
Get an identicon configuration parameter. Precedence order is: - application config (`udata.cfg`) - theme config - default
13,989
def is_all_field_none(self):
    # Behavior-preserving restructure of a long chain of
    # "if field is not None: return False" checks.
    fields = [
        self._id_, self._created, self._updated, self._avatar,
        self._currency, self._description, self._daily_limit,
        self._daily_spent, self._balance, self._alias, self._public_uuid,
        self._status, self._sub_status, self._reason,
        self._reason_description, self._user_id, self._balance_maximum,
        self._budget_month_used, self._budget_month_maximum,
        self._budget_year_used, self._budget_year_maximum,
        self._budget_withdrawal_year_used,
        self._budget_withdrawal_year_maximum, self._notification_filters,
        self._setting,
    ]
    return all(field is None for field in fields)
:rtype: bool
13,990
def get_return_elements(return_columns, namespace, subscript_dict):
    capture_elements = list()
    return_addresses = dict()
    for col in return_columns:
        # '[', ']' and ',' below are assumed delimiters for the
        # Varname[Sub1, Sub2] format described in the docstring.
        if '[' in col:
            name, location = col.strip('] ').split('[')
            subs = [l.strip() for l in location.split(',')]
            address = make_coord_dict(subs, subscript_dict)
        else:
            name = col
            address = {}
        if name in namespace:
            py_name = namespace[name]
        else:
            if name in namespace.values():
                py_name = name
            else:
                raise KeyError(name + " not found as model element")
        if py_name not in capture_elements:
            capture_elements += [py_name]
        return_addresses[col] = (py_name, address)
    return list(capture_elements), return_addresses
Takes a list of return elements formatted in vensim's format Varname[Sub1, Sub2] and returns first the model elements (in python safe language) that need to be computed and collected, and secondly the addresses that each element in the return columns list translates to. Parameters ---------- return_columns: list of strings namespace subscript_dict Returns ------- capture_elements return_addresses
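A small illustration of the element/address split, using hypothetical model names; the columns carry no subscripts, so the make_coord_dict helper is not exercised:

namespace = {'Stock A': 'stock_a', 'Flow B': 'flow_b'}
elements, addresses = get_return_elements(['Stock A', 'Flow B', 'Stock A'],
                                          namespace, subscript_dict={})
print(elements)   # -> ['stock_a', 'flow_b']   each element captured once
print(addresses)  # -> {'Stock A': ('stock_a', {}), 'Flow B': ('flow_b', {})}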
13,991
def create_rule_section(self, name, add_pos=None, after=None, before=None):
    href = self.href
    params = None
    if add_pos is not None:
        href = self.add_at_position(add_pos)
    elif before or after:
        params = self.add_before_after(before, after)
    return ElementCreator(
        self.__class__,
        exception=CreateRuleFailed,
        href=href,
        params=params,
        json={'name': name})
Create a rule section in a Firewall Policy. To specify a specific numbering position for the rule section, use the `add_pos` field. If no position or before/after is specified, the rule section will be placed at the top which will encapsulate all rules below. Create a rule section for the relavant policy:: policy = FirewallPolicy('mypolicy') policy.fw_ipv4_access_rules.create_rule_section(name='attop') # For NAT rules policy.fw_ipv4_nat_rules.create_rule_section(name='mysection', add_pos=5) :param str name: create a rule section by name :param int add_pos: position to insert the rule, starting with position 1. If the position value is greater than the number of rules, the rule is inserted at the bottom. If add_pos is not provided, rule is inserted in position 1. Mutually exclusive with ``after`` and ``before`` params. :param str after: Rule tag to add this rule after. Mutually exclusive with ``add_pos`` and ``before`` params. :param str before: Rule tag to add this rule before. Mutually exclusive with ``add_pos`` and ``after`` params. :raises MissingRequiredInput: when options are specified the need additional setting, i.e. use_vpn action requires a vpn policy be specified. :raises CreateRuleFailed: rule creation failure :return: the created ipv4 rule :rtype: IPv4Rule
13,992
def from_url(location):
    req = urllib.request.Request(location)
    with urllib.request.urlopen(req) as response:
        the_page = response.read().decode()
    return the_page
HTTP request for page at location returned as string. A malformed url raises ValueError; a nonexistent IP raises URLError; a wrong-subnet IP raises URLError; a reachable IP with no HTTP server raises URLError; a reachable IP with HTTP but the wrong page raises HTTPError.
13,993
def _check_file_exists_unix(self, remote_cmd=""):
    if self.direction == "put":
        self.ssh_ctl_chan._enter_shell()
        remote_cmd = "ls {}".format(self.file_system)
        # The expect_string pattern was garbled in the source; a shell
        # prompt match is assumed.
        remote_out = self.ssh_ctl_chan.send_command(
            remote_cmd, expect_string=r"[\$#]")
        self.ssh_ctl_chan._return_cli()
        return self.dest_file in remote_out
    elif self.direction == "get":
        return os.path.exists(self.dest_file)
Check if the dest_file already exists on the file system (return boolean).
13,994
def get(self):
    config = self.get_block()
    if not config:
        return None
    response = dict()
    response.update(self._parse_bgp_as(config))
    response.update(self._parse_router_id(config))
    response.update(self._parse_max_paths(config))
    response.update(self._parse_shutdown(config))
    response.update(self._parse_networks(config))
    # 'neighbors' is the assumed result key (original literal stripped).
    response['neighbors'] = self.neighbors.getall()
    return response
Returns the bgp routing configuration as a dict object
13,995
def _get_sorting_message(request, key):
    control_list = []
    # 'reverse' and 'false' are assumed literals consistent with the
    # docstring's "reverse query" description.
    reverse = request.url.query.get('reverse', None)
    if reverse is None:
        return control_list
    if reverse.lower() == "":
        control_list.append(client_list_control_pb2.ClientSortControls(
            reverse=True,
            keys=key.split(",")
        ))
    elif reverse.lower() != 'false':
        control_list.append(client_list_control_pb2.ClientSortControls(
            reverse=True,
            keys=reverse.split(",")
        ))
    return control_list
Parses the reverse query into a list of ClientSortControls protobuf messages.
13,996
def _get_filename(request, item):
    # Item key, separators and join character are assumed placeholders
    # (original literals stripped).
    if request.keep_image_names:
        filename = OgcImageService.finalize_filename(item['name'].replace(' ', ''))
    else:
        filename = OgcImageService.finalize_filename(
            '_'.join([str(GeopediaService._parse_layer(request.layer)),
                      item['name'].rsplit('/', 1)[-1]]),
            request.image_format
        )
    LOGGER.debug("filename=%s", filename)
    return filename
Creates a filename
13,997
def write_exports(self, exports):
    rf = self.get_distinfo_file(EXPORTS_FILENAME)
    with open(rf, 'w') as f:
        write_exports(exports, f)
Write a dictionary of exports to a file in .ini format. :param exports: A dictionary of exports, mapping an export category to a list of :class:`ExportEntry` instances describing the individual export entries.
13,998
def _ParseTokenType(self, file_object, file_offset):
    # 'uint8' is an assumed data-type map name (original literal stripped).
    token_type_map = self._GetDataTypeMap('uint8')
    token_type, _ = self._ReadStructureFromFileObject(
        file_object, file_offset, token_type_map)
    return token_type
Parses a token type. Args: file_object (dfvfs.FileIO): file-like object. file_offset (int): offset of the token relative to the start of the file-like object. Returns: int: token type
13,999
def create_app(name, site, sourcepath, apppool=None):
    current_apps = list_apps(site)
    if name in current_apps:
        log.debug('Application already present: %s', name)
        return True
    if not os.path.isdir(sourcepath):
        log.error('Path is not present: %s', sourcepath)
        return False
    # PowerShell fragments and log messages are assumed values
    # (original literals stripped).
    ps_cmd = ['New-WebApplication',
              '-Name', "'{0}'".format(name),
              '-Site', "'{0}'".format(site),
              '-PhysicalPath', "'{0}'".format(sourcepath)]
    if apppool:
        ps_cmd.extend(['-ApplicationPool', "'{0}'".format(apppool)])
    cmd_ret = _srvmgr(ps_cmd)
    if cmd_ret['retcode'] != 0:
        msg = 'Unable to create application: {0}\nError: {1}'.format(
            name, cmd_ret['stderr'])
        raise CommandExecutionError(msg)
    new_apps = list_apps(site)
    if name in new_apps:
        log.debug('Application created successfully: %s', name)
        return True
    log.error('Unable to create application: %s', name)
    return False
Create an IIS application. .. note:: This function only validates against the application name, and will return True even if the application already exists with a different configuration. It will not modify the configuration of an existing application. Args: name (str): The IIS application. site (str): The IIS site name. sourcepath (str): The physical path. apppool (str): The name of the IIS application pool. Returns: bool: True if successful, otherwise False CLI Example: .. code-block:: bash salt '*' win_iis.create_app name='app0' site='site0' sourcepath='C:\\site0' apppool='site0'