Columns: repo_id (string, length 15-132), file_path (string, length 34-176), content (string, length 2-3.52M), __index_level_0__ (int64, always 0)
promptflow_repo/promptflow/src/promptflow/promptflow/_sdk/data
promptflow_repo/promptflow/src/promptflow/promptflow/_sdk/data/docker/README.md
Exported Dockerfile & its dependencies are located in the same folder. The structure is as below:

- flow: the folder contains all the flow files
  - ...
- connections: the folder contains yaml files to create all related connections
  - ...
- runit: the folder contains all the runit scripts
  - ...
- Dockerfile: the dockerfile to build the image
- start.sh: the script used in `CMD` of `Dockerfile` to start the service
- settings.json: a json file to store the settings of the docker image
- README.md: the readme file to describe how to use the dockerfile

Please refer to [official doc](https://microsoft.github.io/promptflow/how-to-guides/deploy-and-export-a-flow.html#export-a-flow) for more details about how to use the exported dockerfile and scripts.
0
promptflow_repo/promptflow/src/promptflow/promptflow/_sdk/data
promptflow_repo/promptflow/src/promptflow/promptflow/_sdk/data/docker/start.sh.jinja2
#!/bin/bash

# stop services created by runsv and propagate SIGINT, SIGTERM to child jobs
sv_stop() {
    echo "$(date -uIns) - Stopping all runsv services"
    for s in $(ls -d /var/runit/*); do
        sv stop $s
    done
}

# register SIGINT, SIGTERM handler
trap sv_stop SIGINT SIGTERM

# start services in background and wait all child jobs
runsvdir /var/runit &
wait
0
promptflow_repo/promptflow/src/promptflow/promptflow/_sdk/data
promptflow_repo/promptflow/src/promptflow/promptflow/_sdk/data/docker/Dockerfile.jinja2
# syntax=docker/dockerfile:1

{% if env.image %}
FROM {{env.image}}
{% else %}
{% if show_comment %}
# use mcr.microsoft.com/azureml/openmpi4.1.0-ubuntu20.04:latest? current image is based on Debian 11
{% endif %}
FROM docker.io/continuumio/miniconda3:latest
{% endif %}

WORKDIR /

{% if env.python_requirements_txt %}
COPY ./flow/{{env.python_requirements_txt}} /flow/{{env.python_requirements_txt}}
{% endif %}

# create conda environment
{% if env.conda_file %}
COPY ./flow/{{env.conda_file}} /flow/{{env.conda_file}}
RUN conda create -f flow/{{env.conda_file}} -q && \
{% else %}
RUN conda create -n {{env.conda_env_name}} python=3.9.16 pip=23.0.1 -q -y && \
{% endif %}
    conda run -n {{env.conda_env_name}} \
{% if env.python_requirements_txt %}
    pip install -r /flow/{{env.python_requirements_txt}} && \
{% else %}
{% if env.sdk_version %}
    pip install promptflow=={{env.sdk_version}} \
{% else %}
    pip install promptflow \
{% endif %}
    promptflow-tools && \
{% endif %}
    conda run -n {{env.conda_env_name}} pip install keyrings.alt && \
    conda run -n {{env.conda_env_name}} pip install gunicorn==20.1.0 && \
    conda run -n {{env.conda_env_name}} pip cache purge && \
    conda clean -a -y

COPY ./flow /flow

{% if env.setup_sh %}
RUN conda run -n {{env.conda_env_name}} sh /flow/{{ env.setup_sh }}
{% endif %}

RUN apt-get update && apt-get install -y runit

EXPOSE 8080

COPY ./connections/* /connections/

# reset runsvdir
RUN rm -rf /var/runit
COPY ./runit /var/runit
# grant permission
RUN chmod -R +x /var/runit

COPY ./start.sh /
CMD ["bash", "./start.sh"]
0
promptflow_repo/promptflow/src/promptflow/promptflow/_sdk/data/docker/runit
promptflow_repo/promptflow/src/promptflow/promptflow/_sdk/data/docker/runit/promptflow-serve/run.jinja2
#! /bin/bash

CONDA_ENV_PATH="$(conda info --base)/envs/{{env.conda_env_name}}"
export PATH="$CONDA_ENV_PATH/bin:$PATH"

{% if connection_yaml_paths %}
{% if show_comment %}
# hack: for some unknown reason, without this ls, the connection creation will fail
{% endif %}
ls
ls /connections
{% endif %}

{% for connection_yaml_path in connection_yaml_paths %}
pf connection create --file /{{ connection_yaml_path }}
{% endfor %}

echo "start promptflow serving with worker_num: 8, worker_threads: 1"
cd /flow
gunicorn -w 8 --threads 1 -b "0.0.0.0:8080" --timeout 300 "promptflow._sdk._serving.app:create_app()"
0
promptflow_repo/promptflow/src/promptflow/promptflow/_sdk/data/docker/runit
promptflow_repo/promptflow/src/promptflow/promptflow/_sdk/data/docker/runit/promptflow-serve/finish.jinja2
#!/bin/bash

echo "$(date -uIns) - promptflow-serve/finish $@"

# stop all gunicorn processes
echo "$(date -uIns) - Stopping all Gunicorn processes"
pkill gunicorn
while pgrep gunicorn >/dev/null; do
    echo "$(date -uIns) - Gunicorn process is still running, waiting for 1s"
    sleep 1
done
echo "$(date -uIns) - Stopped all Gunicorn processes"
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/version_hint_utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import datetime
import json
import logging

from promptflow._constants import (LAST_HINT_TIME, LAST_CHECK_TIME, PF_VERSION_CHECK, CLI_PACKAGE_NAME,
                                   HINT_INTERVAL_DAY, GET_PYPI_INTERVAL_DAY, LATEST_VERSION, CURRENT_VERSION)
from promptflow._sdk._constants import HOME_PROMPT_FLOW_DIR

HINT_ACTIVITY_NAME = ["pf.flows.test", "pf.runs.create_or_update", "pfazure.flows.create_or_update",
                      "pfazure.runs.create_or_update"]
logger = logging.getLogger(__name__)


def get_cached_versions():
    from promptflow._sdk._utils import read_write_by_user

    (HOME_PROMPT_FLOW_DIR / PF_VERSION_CHECK).touch(mode=read_write_by_user(), exist_ok=True)
    with open(HOME_PROMPT_FLOW_DIR / PF_VERSION_CHECK, "r") as f:
        try:
            cached_versions = json.load(f)
        except json.decoder.JSONDecodeError:
            cached_versions = {}
    return cached_versions


def dump_cached_versions(cached_versions):
    with open(HOME_PROMPT_FLOW_DIR / PF_VERSION_CHECK, "w") as f:
        json.dump(cached_versions, f)


def get_latest_version_from_pypi(package_name):
    pypi_url = f"https://pypi.org/pypi/{package_name}/json"
    try:
        import requests

        response = requests.get(pypi_url, timeout=3)
        if response.status_code == 200:
            data = response.json()
            latest_version = data["info"]["version"]
            return latest_version
        else:
            return None
    except Exception as ex:  # pylint: disable=broad-except
        logger.debug(f"Failed to get the latest version from '{pypi_url}'. {str(ex)}")
        return None


def check_latest_version():
    """Get the latest version from a cached file."""
    cached_versions = get_cached_versions()
    last_check_time = datetime.datetime.strptime(cached_versions[LAST_CHECK_TIME], '%Y-%m-%d %H:%M:%S.%f') \
        if LAST_CHECK_TIME in cached_versions else None

    if last_check_time is None or (
        datetime.datetime.now() > last_check_time + datetime.timedelta(days=GET_PYPI_INTERVAL_DAY)
    ):
        version = get_latest_version_from_pypi(CLI_PACKAGE_NAME)
        if version is not None:
            cached_versions[LATEST_VERSION] = version
        cached_versions[LAST_CHECK_TIME] = str(datetime.datetime.now())
        dump_cached_versions(cached_versions)


def hint_for_update():
    """
    Check if there is a new version of prompt flow available every 7 days. If yes, log debug info to hint
    the customer to upgrade the package.
    """
    cached_versions = get_cached_versions()
    last_hint_time = datetime.datetime.strptime(
        cached_versions[LAST_HINT_TIME], '%Y-%m-%d %H:%M:%S.%f'
    ) if LAST_HINT_TIME in cached_versions else None

    if last_hint_time is None or (
        datetime.datetime.now() > last_hint_time + datetime.timedelta(days=HINT_INTERVAL_DAY)
    ):
        from promptflow._sdk._utils import get_promptflow_sdk_version

        cached_versions[CURRENT_VERSION] = get_promptflow_sdk_version()
        if LATEST_VERSION in cached_versions:
            from packaging.version import parse

            if parse(cached_versions[CURRENT_VERSION]) < parse(cached_versions[LATEST_VERSION]):
                cached_versions[LAST_HINT_TIME] = str(datetime.datetime.now())
                message = (
                    f"New prompt flow version available: promptflow-{cached_versions[LATEST_VERSION]}. "
                    f"Run 'pf upgrade' to update the CLI."
                )
                logger.debug(message)
        dump_cached_versions(cached_versions)
0
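A short usage sketch for the version-hint helpers above; it is not part of the repository, and the activity name and logging setup are illustrative only.

import logging

from promptflow._utils.version_hint_utils import HINT_ACTIVITY_NAME, check_latest_version, hint_for_update

logging.basicConfig(level=logging.DEBUG)

activity_name = "pf.flows.test"  # one of the tracked activities in HINT_ACTIVITY_NAME
if activity_name in HINT_ACTIVITY_NAME:
    check_latest_version()  # refreshes the cached PyPI version at most every GET_PYPI_INTERVAL_DAY days
    hint_for_update()       # logs a debug hint at most every HINT_INTERVAL_DAY days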
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/multimedia_data_converter.py
import re
from dataclasses import dataclass
from enum import Enum
from functools import partial
from pathlib import Path
from typing import Any, Callable

from promptflow._utils.multimedia_utils import is_multimedia_dict


class ResourceType(Enum):
    """
    Enumeration of different types of multimedia resources.
    We support path, URL, and base64 data.
    """

    PATH = "path"
    URL = "url"
    BASE64 = "base64"


@dataclass
class MultimediaInfo:
    """
    Data class that holds information about a multimedia resource.
    """

    mime_type: str  # The MIME type of the multimedia resource.
    resource_type: ResourceType  # The type of the resource as defined in ResourceType.
    content: str  # The content of the multimedia resource (path, URL, or base64 string).


class AbstractMultimediaFormatAdapter:
    """
    Abstract base class for adapting multimedia formats.

    This class provides an interface for extracting multimedia information from various data formats or
    constructing data formats from multimedia information. Subclasses should implement methods for a specific
    contract version.

    A MultimediaInfo object contains the mime_type, resource_type, and the actual content of the multimedia
    resource. The multimedia data is typically represented as a dictionary with keys and values conforming to
    a specific multimedia data contract. One multimedia data example from the 20231201 version:
    {"data:image/jpg;path": "logo.jpg"}
    """

    # Check if the original_data is a multimedia format according to the current contract version.
    def is_valid_format(self, original_data: Any):
        raise NotImplementedError()

    def extract_info(self, original_data: Any) -> MultimediaInfo:
        """
        Get the MultimediaInfo from the original data. Will include mime_type, resource_type, and content.
        Below is an example for the 20231201 version:
        {"data:image/jpg;path": "logo.jpg"} -> "image/jpg", "path", "logo.jpg"
        """
        raise NotImplementedError()

    def create_data(self, info: MultimediaInfo) -> Any:
        """
        Create multimedia data from info. Below is an example for the 20231201 version:
        "image/jpg", "path", "logo.jpg" -> {"data:image/jpg;path": "logo.jpg"}
        """
        raise NotImplementedError()


class MultimediaFormatAdapter20231201(AbstractMultimediaFormatAdapter):
    """
    The 20231201 version is our first contract version; it supports text and images (path/url/base64).
    20231201 is the version number assigned by the customer in the YAML file.

    Path format example: {"data:image/jpg;path": "logo.jpg"}
    Url format example: {"data:image/jpg;url": "https://example.com/logo.jpg"}
    Base64 format example: {"data:image/jpg;base64": "base64 string"}
    """

    MIME_PATTERN = re.compile(r"^data:(.*);(path|base64|url)$")

    def is_valid_format(self, original_data: Any):
        return isinstance(original_data, dict) and is_multimedia_dict(original_data)

    def extract_info(self, original_data: Any) -> MultimediaInfo:
        if not self.is_valid_format(original_data):
            return None
        for key in original_data:
            match = re.match(self.MIME_PATTERN, key)
            if match:
                mime_type, resource_type = match.group(1), match.group(2)
                content = original_data[key]
                return MultimediaInfo(mime_type, ResourceType(resource_type), content)
        return None

    def create_data(self, info: MultimediaInfo):
        return {f"data:{info.mime_type};{info.resource_type.value}": info.content}


class AbstractMultimediaInfoConverter:
    def convert(self, info: MultimediaInfo) -> MultimediaInfo:
        """
        Change info's mime type/resource type/content based on the client's logic.
        For cases that do not need to be changed, just return the original info.

        :param info: The MultimediaInfo to be converted.
        :type info: MultimediaInfo
        :return: The converted MultimediaInfo.
        :rtype: MultimediaInfo
        """
        raise NotImplementedError()


class MultimediaConverter:
    def __init__(self, flow_file: Path):
        """
        Initialize the MultimediaConverter.

        :param flow_file: The path to the YAML file. The YAML content will be used to determine
                          the contract version.
        :type flow_file: Path
        """
        # TODO: check yaml content to determine the current contract version.
        # Different contract versions will have different multimedia formats.
        # The version exists in the yaml file, so we need to load the yaml to get the version and init the converter.
        self.format_adapter = MultimediaFormatAdapter20231201()

    def convert_content_recursively(self, content: Any, client_converter: AbstractMultimediaInfoConverter):
        """
        Recursively converts the multimedia data format in content.

        :param content: The object that may contain multimedia data.
        :type content: Any
        :param client_converter: The converter to modify multimedia info based on the client's logic.
        :type client_converter: AbstractMultimediaInfoConverter
        :return: The content with changed multimedia format.
        :rtype: Any
        """
        process_func = partial(self._convert_content, converter=client_converter)
        return self._process_content_recursively(content, process_func=process_func)

    def _convert_content(self, original_data: Any, converter: AbstractMultimediaInfoConverter):
        if not self.format_adapter.is_valid_format(original_data):
            return original_data
        info = self.format_adapter.extract_info(original_data)
        # When we can't extract multimedia info from original_data, return original_data directly.
        if info is None:
            return original_data
        info = converter.convert(info)
        return self.format_adapter.create_data(info)

    def _process_content_recursively(self, content: Any, process_func: Callable):
        if isinstance(content, list):
            return [self._process_content_recursively(item, process_func) for item in content]
        elif isinstance(content, dict):
            if self.format_adapter.is_valid_format(content):
                return process_func(original_data=content)
            else:
                return {k: self._process_content_recursively(v, process_func) for k, v in content.items()}
        else:
            return content
0
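A small usage sketch for the converter above, not taken from the repository: a client converter that prefixes relative image paths. The flow file path and the "images/" prefix are made up.

from pathlib import Path

from promptflow._utils.multimedia_data_converter import (
    AbstractMultimediaInfoConverter,
    MultimediaConverter,
    MultimediaInfo,
    ResourceType,
)


class PrefixPathConverter(AbstractMultimediaInfoConverter):
    def convert(self, info: MultimediaInfo) -> MultimediaInfo:
        # Only rewrite path-style resources; leave url/base64 content untouched.
        if info.resource_type == ResourceType.PATH:
            return MultimediaInfo(info.mime_type, info.resource_type, f"images/{info.content}")
        return info


converter = MultimediaConverter(flow_file=Path("flow.dag.yaml"))  # hypothetical flow file
content = {"question": "what is this?", "image": {"data:image/jpg;path": "logo.jpg"}}
converted = converter.convert_content_recursively(content, PrefixPathConverter())
# converted["image"] == {"data:image/jpg;path": "images/logo.jpg"}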
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/openai_metrics_calculator.py
import tiktoken
from importlib.metadata import version

from promptflow.exceptions import UserErrorException

IS_LEGACY_OPENAI = version("openai").startswith("0.")


class OpenAIMetricsCalculator:
    def __init__(self, logger=None) -> None:
        self._logger = logger

    def get_openai_metrics_from_api_call(self, api_call: dict):
        total_metrics = {}
        if self._need_collect_metrics(api_call):
            try:
                metrics = self._get_openai_metrics_for_signal_api(api_call)
                self.merge_metrics_dict(total_metrics, metrics)
            except Exception as ex:
                self._log_warning(f"Failed to calculate metrics due to exception: {ex}.")
        children = api_call.get("children")
        if children is not None:
            for child in children:
                child_metrics = self.get_openai_metrics_from_api_call(child)
                self.merge_metrics_dict(total_metrics, child_metrics)
        api_call["system_metrics"] = total_metrics
        return total_metrics

    def _need_collect_metrics(self, api_call: dict):
        if api_call.get("type") != "LLM":
            return False
        output = api_call.get("output")
        if not isinstance(output, dict) and not isinstance(output, list):
            return False
        inputs = api_call.get("inputs")
        if not isinstance(inputs, dict):
            return False
        return True

    def _get_openai_metrics_for_signal_api(self, api_call: dict):
        output = api_call.get("output")
        if isinstance(output, dict):
            usage = output.get("usage")
            if isinstance(usage, dict):
                return usage
            self._log_warning(
                "Cannot find openai metrics in output, "
                "will calculate metrics from response data directly."
            )
        name = api_call.get("name")
        # Support both the legacy api and the OpenAI v1 api.
        # Legacy api:
        #   https://github.com/openai/openai-python/blob/v0.28.1/openai/api_resources/chat_completion.py
        #   https://github.com/openai/openai-python/blob/v0.28.1/openai/api_resources/completion.py
        # OpenAI v1 api:
        #   https://github.com/openai/openai-python/blob/main/src/openai/resources/chat/completions.py
        #   https://github.com/openai/openai-python/blob/main/src/openai/resources/completions.py
        if (
            name == "openai.api_resources.chat_completion.ChatCompletion.create"
            or name == "openai.resources.chat.completions.Completions.create"  # openai v1
        ):
            return self._get_openai_metrics_for_chat_api(api_call)
        elif (
            name == "openai.api_resources.completion.Completion.create"
            or name == "openai.resources.completions.Completions.create"  # openai v1
        ):
            return self._get_openai_metrics_for_completion_api(api_call)
        else:
            raise CalculatingMetricsError(f"Calculating metrics for api {name} is not supported.")

    def _try_get_model(self, inputs, output):
        if IS_LEGACY_OPENAI:
            api_type = inputs.get("api_type")
            if not api_type:
                raise CalculatingMetricsError("Cannot calculate metrics for none or empty api_type.")
            if api_type == "azure":
                model = inputs.get("engine")
            else:
                model = inputs.get("model")
        else:
            if isinstance(output, dict):
                model = output.get("model")
            else:
                model = output[0].model if len(output) > 0 and hasattr(output[0], "model") else None
            if not model:
                model = inputs.get("model")
        if not model:
            raise CalculatingMetricsError(
                "Cannot get a valid model to calculate metrics. "
                "Please specify an engine for the AzureOpenAI API or a model for the OpenAI API."
            )
        return model

    def _get_openai_metrics_for_chat_api(self, api_call):
        inputs = api_call.get("inputs")
        output = api_call.get("output")
        metrics = {}
        enc, tokens_per_message, tokens_per_name = self._get_encoding_for_chat_api(
            self._try_get_model(inputs, output)
        )
        metrics["prompt_tokens"] = self._get_prompt_tokens_from_messages(
            inputs["messages"], enc, tokens_per_message, tokens_per_name
        )
        if isinstance(output, list):
            if IS_LEGACY_OPENAI:
                metrics["completion_tokens"] = len(output)
            else:
                metrics["completion_tokens"] = len(
                    [chunk for chunk in output if chunk.choices and chunk.choices[0].delta.content]
                )
        else:
            metrics["completion_tokens"] = self._get_completion_tokens_for_chat_api(output, enc)
        metrics["total_tokens"] = metrics["prompt_tokens"] + metrics["completion_tokens"]
        return metrics

    def _get_encoding_for_chat_api(self, model):
        try:
            enc = tiktoken.encoding_for_model(model)
        except KeyError:
            enc = tiktoken.get_encoding("cl100k_base")
        if model == "gpt-35-turbo-0301":
            tokens_per_message = 4
            tokens_per_name = -1
        elif "gpt-35-turbo" in model or "gpt-3.5-turbo" in model or "gpt-4" in model:
            tokens_per_message = 3
            tokens_per_name = 1
        else:
            raise CalculatingMetricsError(f"Calculating metrics for model {model} is not supported.")
        return enc, tokens_per_message, tokens_per_name

    def _get_prompt_tokens_from_messages(self, messages, enc, tokens_per_message, tokens_per_name):
        prompt_tokens = 0
        for message in messages:
            prompt_tokens += tokens_per_message
            for key, value in message.items():
                prompt_tokens += len(enc.encode(value))
                if key == "name":
                    prompt_tokens += tokens_per_name
        prompt_tokens += 3
        return prompt_tokens

    def _get_completion_tokens_for_chat_api(self, output, enc):
        completion_tokens = 0
        choices = output.get("choices")
        if isinstance(choices, list):
            for ch in choices:
                if isinstance(ch, dict):
                    message = ch.get("message")
                    if isinstance(message, dict):
                        content = message.get("content")
                        if isinstance(content, str):
                            completion_tokens += len(enc.encode(content))
        return completion_tokens

    def _get_openai_metrics_for_completion_api(self, api_call: dict):
        metrics = {}
        inputs = api_call.get("inputs")
        output = api_call.get("output")
        enc = self._get_encoding_for_completion_api(self._try_get_model(inputs, output))
        metrics["prompt_tokens"] = 0
        prompt = inputs.get("prompt")
        if isinstance(prompt, str):
            metrics["prompt_tokens"] = len(enc.encode(prompt))
        elif isinstance(prompt, list):
            for pro in prompt:
                metrics["prompt_tokens"] += len(enc.encode(pro))
        if isinstance(output, list):
            if IS_LEGACY_OPENAI:
                metrics["completion_tokens"] = len(output)
            else:
                metrics["completion_tokens"] = len(
                    [chunk for chunk in output if chunk.choices and chunk.choices[0].text]
                )
        else:
            metrics["completion_tokens"] = self._get_completion_tokens_for_completion_api(output, enc)
        metrics["total_tokens"] = metrics["prompt_tokens"] + metrics["completion_tokens"]
        return metrics

    def _get_encoding_for_completion_api(self, model):
        try:
            return tiktoken.encoding_for_model(model)
        except KeyError:
            return tiktoken.get_encoding("p50k_base")

    def _get_completion_tokens_for_completion_api(self, output, enc):
        completion_tokens = 0
        choices = output.get("choices")
        if isinstance(choices, list):
            for ch in choices:
                if isinstance(ch, dict):
                    text = ch.get("text")
                    if isinstance(text, str):
                        completion_tokens += len(enc.encode(text))
        return completion_tokens

    def merge_metrics_dict(self, metrics: dict, metrics_to_merge: dict):
        for k, v in metrics_to_merge.items():
            metrics[k] = metrics.get(k, 0) + v

    def _log_warning(self, msg):
        if self._logger:
            self._logger.warning(msg)


class CalculatingMetricsError(UserErrorException):
    """The exception that is raised when calculating metrics failed."""

    pass
0
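An illustrative call to the calculator above with a hand-built trace node; real traces are produced by the promptflow tracer, and every value below is made up.

from promptflow._utils.openai_metrics_calculator import OpenAIMetricsCalculator

api_call = {
    "type": "LLM",
    "name": "openai.resources.chat.completions.Completions.create",
    "inputs": {"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Hi"}]},
    "output": {"usage": {"prompt_tokens": 8, "completion_tokens": 2, "total_tokens": 10}},
    "children": [],
}

calculator = OpenAIMetricsCalculator()
metrics = calculator.get_openai_metrics_from_api_call(api_call)
# When the response already carries a "usage" section it is used directly:
# metrics == {"prompt_tokens": 8, "completion_tokens": 2, "total_tokens": 10}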
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/context_utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------

"""!!!Note: the context managers in this file are only used for command line related logic;
please avoid using them in service code!!!"""

import contextlib
import os
import sys


@contextlib.contextmanager
def _change_working_dir(path, mkdir=True):
    """Context manager for changing the current working directory"""
    saved_path = os.getcwd()
    if mkdir:
        os.makedirs(path, exist_ok=True)
    os.chdir(str(path))
    try:
        yield
    finally:
        os.chdir(saved_path)


@contextlib.contextmanager
def inject_sys_path(path):
    original_sys_path = sys.path.copy()
    sys.path.insert(0, str(path))
    try:
        yield
    finally:
        sys.path = original_sys_path
0
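A brief usage sketch of the two context managers above; the folder path is hypothetical.

import os

from promptflow._utils.context_utils import _change_working_dir, inject_sys_path

with _change_working_dir("/tmp/my_flow"), inject_sys_path("/tmp/my_flow"):
    # The working directory is now /tmp/my_flow and modules placed there can be imported.
    print(os.getcwd())
# Both the working directory and sys.path are restored here.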
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/execution_utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from typing import AbstractSet, Any, Dict, List, Mapping

from promptflow._utils.logger_utils import logger
from promptflow.contracts.flow import Flow, FlowInputDefinition, InputValueType
from promptflow.contracts.run_info import FlowRunInfo, Status


def apply_default_value_for_input(inputs: Dict[str, FlowInputDefinition], line_inputs: Mapping) -> Dict[str, Any]:
    updated_inputs = dict(line_inputs or {})
    for key, value in inputs.items():
        if key not in updated_inputs and (value and value.default is not None):
            updated_inputs[key] = value.default
    return updated_inputs


def handle_line_failures(run_infos: List[FlowRunInfo], raise_on_line_failure: bool = False):
    """Handle line failures in batch run"""
    failed = [i for i, r in enumerate(run_infos) if r.status == Status.Failed]
    failed_msg = None
    if len(failed) > 0:
        failed_indexes = ",".join([str(i) for i in failed])
        first_fail_exception = run_infos[failed[0]].error["message"]
        if raise_on_line_failure:
            failed_msg = "Flow run failed due to the error: " + first_fail_exception
            raise Exception(failed_msg)
        failed_msg = (
            f"{len(failed)}/{len(run_infos)} flow run failed, indexes: [{failed_indexes}],"
            f" exception of index {failed[0]}: {first_fail_exception}"
        )
        logger.error(failed_msg)


def get_aggregation_inputs_properties(flow: Flow) -> AbstractSet[str]:
    """Return the serialized InputAssignments of the aggregation nodes' inputs.

    For example, if an aggregation node refers to the outputs of a node named "grade",
    this function will return set("${grade.output}").
    """
    normal_node_names = {node.name for node in flow.nodes if flow.is_normal_node(node.name)}
    properties = set()
    for node in flow.nodes:
        if node.name in normal_node_names:
            continue
        for value in node.inputs.values():
            if not value.value_type == InputValueType.NODE_REFERENCE:
                continue
            if value.value in normal_node_names:
                properties.add(value.serialize())
    return properties


def collect_lines(indexes: List[int], kvs: Mapping[str, List]) -> Mapping[str, List]:
    """Collect the values from the kvs according to the indexes."""
    return {k: [v[i] for i in indexes] for k, v in kvs.items()}
0
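A tiny illustration of collect_lines from the module above, picking selected line indexes out of column-oriented batch data; the data is made up.

from promptflow._utils.execution_utils import collect_lines

kvs = {"question": ["q0", "q1", "q2"], "answer": ["a0", "a1", "a2"]}
failed_indexes = [0, 2]
print(collect_lines(failed_indexes, kvs))
# {'question': ['q0', 'q2'], 'answer': ['a0', 'a2']}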
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------

"""This is a common util file.

!!!Please do not include any project related import.!!!
"""
import contextlib
import contextvars
import functools
import importlib
import json
import logging
import os
import re
import time
import traceback
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, Iterable, Iterator, List, Optional, TypeVar, Union

from promptflow._constants import DEFAULT_ENCODING

T = TypeVar("T")


class AttrDict(dict):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def __getattr__(self, item):
        if item in self:
            return self.__getitem__(item)
        return super().__getattribute__(item)


def camel_to_snake(text: str) -> Optional[str]:
    text = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", text)
    return re.sub("([a-z0-9])([A-Z])", r"\1_\2", text).lower()


class DateTimeEncoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, datetime):
            return o.isoformat()
        return json.JSONEncoder.default(self, o)


def is_json_serializable(value: Any) -> bool:
    try:
        json.dumps(value)
        return True
    except TypeError:
        return False


def load_json(file_path: Union[str, Path]) -> dict:
    if os.path.getsize(file_path) > 0:
        with open(file_path, "r") as f:
            return json.load(f)
    return {}


def dump_list_to_jsonl(file_path: Union[str, Path], list_data: List[Dict]):
    with open(file_path, "w", encoding=DEFAULT_ENCODING) as jsonl_file:
        for data in list_data:
            json.dump(data, jsonl_file, ensure_ascii=False)
            jsonl_file.write("\n")


def transpose(values: List[Dict[str, Any]], keys: Optional[List] = None) -> Dict[str, List]:
    keys = keys or list(values[0].keys())
    return {key: [v.get(key) for v in values] for key in keys}


def reverse_transpose(values: Dict[str, List]) -> List[Dict[str, Any]]:
    # Set up a result list with the same length as the value lists.
    value_lists = list(values.values())
    _len = len(value_lists[0])
    if any(len(value_list) != _len for value_list in value_lists):
        raise Exception(f"Value list of each key must have same length, please check {values!r}.")
    result = []
    for i in range(_len):
        result.append({})
    for key, vals in values.items():
        for _idx, val in enumerate(vals):
            result[_idx][key] = val
    return result


def deprecated(f=None, replace=None, version=None):
    if f is None:
        return functools.partial(deprecated, replace=replace, version=version)
    msg = [f"Function {f.__qualname__!r} is deprecated."]
    if version:
        msg.append(f"Deprecated since version {version}.")
    if replace:
        msg.append(f"Use {replace!r} instead.")
    msg = " ".join(msg)

    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        logging.warning(msg)
        return f(*args, **kwargs)

    return wrapper


def try_import(module, error_message, raise_error=True):
    try:
        importlib.import_module(module)
    except ImportError as e:
        ex_message = f"{error_message} Root cause: {e!r}"
        logging.warning(ex_message)
        if raise_error:
            raise Exception(ex_message)


def is_in_ci_pipeline():
    if os.environ.get("IS_IN_CI_PIPELINE") == "true":
        return True
    return False


def count_and_log_progress(
    inputs: Iterable[T], logger: logging.Logger, total_count: int, formatter="{count} / {total_count} finished."
) -> Iterator[T]:
    log_interval = max(int(total_count / 10), 1)
    count = 0
    for item in inputs:
        count += 1
        if count % log_interval == 0 or count == total_count:
            logger.info(formatter.format(count=count, total_count=total_count))
        yield item


def log_progress(
    run_start_time: datetime,
    logger: logging.Logger,
    count: int,
    total_count: int,
    formatter="Finished {count} / {total_count} lines.",
    *,
    last_log_count: Optional[int] = None,
):
    # Calculate log_interval to determine when to log progress.
    # If total_count is less than 100, log every 10% of total_count; otherwise, log every 10 lines.
    log_interval = min(10, max(int(total_count / 10), 1))
    # If last_log_count is not None, determine whether to log based on whether the difference
    # between the current count and the previous count exceeds log_interval.
    # Otherwise, decide based on whether the current count is evenly divisible by log_interval.
    if last_log_count:
        log_flag = (count - last_log_count) >= log_interval
    else:
        log_flag = count % log_interval == 0

    if count > 0 and (log_flag or count == total_count):
        average_execution_time = round((datetime.utcnow().timestamp() - run_start_time.timestamp()) / count, 2)
        estimated_execution_time = round(average_execution_time * (total_count - count), 2)
        logger.info(formatter.format(count=count, total_count=total_count))
        logger.info(
            f"Average execution time for completed lines: {average_execution_time} seconds. "
            f"Estimated time for incomplete lines: {estimated_execution_time} seconds."
        )


def extract_user_frame_summaries(frame_summaries: List[traceback.FrameSummary]):
    from promptflow import _core

    core_folder = os.path.dirname(_core.__file__)

    for i in range(len(frame_summaries) - 1):
        cur_file = frame_summaries[i].filename
        next_file = frame_summaries[i + 1].filename
        # If the current frame is in the _core folder and the next frame is not in the _core folder,
        # then we can say that the next frame is in user code.
        if cur_file.startswith(core_folder) and not next_file.startswith(core_folder):
            return frame_summaries[i + 1 :]
    return frame_summaries


def format_user_stacktrace(frame):
    # TODO: Maybe we can filter all frames from our code base to make it clean?
    frame_summaries = traceback.extract_stack(frame)
    user_frame_summaries = extract_user_frame_summaries(frame_summaries)
    return traceback.format_list(user_frame_summaries)


def generate_elapsed_time_messages(func_name: str, start_time: float, interval: int, thread_id: int):
    import sys

    frames = sys._current_frames()
    if thread_id not in frames:
        thread_msg = (
            f"thread {thread_id} cannot be found in sys._current_frames, "
            + "maybe it has been terminated due to unexpected errors."
        )
    else:
        frame = frames[thread_id]
        stack_msgs = format_user_stacktrace(frame)
        stack_msg = "".join(stack_msgs)
        thread_msg = f"stacktrace of thread {thread_id}:\n{stack_msg}"
    elapse_time = time.perf_counter() - start_time
    # Make elapse time a multiple of interval.
    elapse_time = round(elapse_time / interval) * interval
    msgs = [f"{func_name} has been running for {elapse_time:.0f} seconds, {thread_msg}"]
    return msgs


def set_context(context: contextvars.Context):
    for var, value in context.items():
        var.set(value)


def convert_inputs_mapping_to_param(inputs_mapping: dict):
    """Use this function to convert inputs_mapping to a string that can be passed to a component as a string
    parameter. We have to do this since we can't pass a dict as a parameter to a component.
    # TODO: Finalize the format of inputs_mapping
    """
    return ",".join([f"{k}={v}" for k, v in inputs_mapping.items()])


@contextlib.contextmanager
def environment_variable_overwrite(key, val):
    if key in os.environ.keys():
        backup_value = os.environ[key]
    else:
        backup_value = None
    os.environ[key] = val

    try:
        yield
    finally:
        if backup_value:
            os.environ[key] = backup_value
        else:
            os.environ.pop(key)


def resolve_dir_to_absolute(base_dir: Union[str, Path], sub_dir: Union[str, Path]) -> Path:
    """Resolve directory to absolute path with base_dir as root"""
    path = sub_dir if isinstance(sub_dir, Path) else Path(sub_dir)
    if not path.is_absolute():
        base_dir = base_dir if isinstance(base_dir, Path) else Path(base_dir)
        path = base_dir / sub_dir
    return path


def parse_ua_to_dict(ua):
    """Parse a user agent string to a dict with the ua name as key and the ua version as value."""
    ua_dict = {}
    ua_list = ua.split(" ")
    for item in ua_list:
        if item:
            key, value = item.split("/")
            ua_dict[key] = value
    return ua_dict


# TODO: Add a "conditions" parameter to pass in a list of lambda functions
# to check if the environment variable is valid.
def get_int_env_var(env_var_name, default_value=None):
    """
    The function `get_int_env_var` retrieves an integer environment variable value, with an optional default
    value if the variable is not set or cannot be converted to an integer.

    :param env_var_name: The name of the environment variable you want to retrieve the value of
    :param default_value: The default value is the value that will be returned if the environment variable
        is not found or if it cannot be converted to an integer
    :return: an integer value
    """
    try:
        return int(os.environ.get(env_var_name, default_value))
    except Exception:
        return default_value


def prompt_y_n(msg, default=None):
    if default not in [None, "y", "n"]:
        raise ValueError("Valid values for default are 'y', 'n' or None")
    y = "Y" if default == "y" else "y"
    n = "N" if default == "n" else "n"
    while True:
        ans = prompt_input("{} ({}/{}): ".format(msg, y, n))
        if ans.lower() == n.lower():
            return False
        if ans.lower() == y.lower():
            return True
        if default and not ans:
            return default == y.lower()


def prompt_input(msg):
    return input("\n===> " + msg)
0
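A few of the helpers above in action; the values and the environment variable name are illustrative.

import os

from promptflow._utils.utils import camel_to_snake, environment_variable_overwrite, reverse_transpose, transpose

print(camel_to_snake("FlowRunInfo"))  # flow_run_info

rows = [{"question": "q0", "answer": "a0"}, {"question": "q1", "answer": "a1"}]
columns = transpose(rows)  # {'question': ['q0', 'q1'], 'answer': ['a0', 'a1']}
assert reverse_transpose(columns) == rows

with environment_variable_overwrite("PF_EXAMPLE_FLAG", "1"):
    assert os.environ["PF_EXAMPLE_FLAG"] == "1"
# The previous value (or absence) of PF_EXAMPLE_FLAG is restored afterwards.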
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/thread_utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import contextvars
import logging
import threading

from promptflow._utils.utils import set_context


class RepeatLogTimer(threading.Timer):
    """Repeat to log message every interval seconds until it is cancelled."""

    def __init__(
        self, interval_seconds: float, logger: logging.Logger, level: int, log_message_function, args: tuple = None
    ):
        self._logger = logger
        self._level = level
        self._log_message_function = log_message_function
        self._function_args = args if args else tuple()
        self._context = contextvars.copy_context()
        super().__init__(interval_seconds, function=None)

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, *args):
        self.cancel()

    def run(self):
        """Override Timer.run method."""
        # Set context variables from parent context.
        set_context(self._context)
        while not self.finished.wait(self.interval):
            if not self.finished.is_set():
                msgs = self._log_message_function(*self._function_args)
                for msg in msgs:
                    self._logger.log(self._level, msg)
        self.finished.set()
0
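A sketch of how RepeatLogTimer is typically used: log a message periodically while a slow call is in progress. The interval, logger name, and message function here are examples, not taken from the repository.

import logging
import time

from promptflow._utils.thread_utils import RepeatLogTimer

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("example")

with RepeatLogTimer(
    interval_seconds=1,
    logger=logger,
    level=logging.WARNING,
    log_message_function=lambda name: [f"{name} is still running..."],
    args=("slow_step",),
):
    time.sleep(3)  # stand-in for a slow operation; a warning is logged every second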
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/feature_utils.py
from dataclasses import dataclass
from enum import Enum
from typing import Optional


class FeatureState(Enum):
    """The enum of feature state.

    READY: The feature is ready to use.
    E2ETEST: The feature is not ready to be shipped to customer and is in e2e testing.
    """

    READY = "Ready"
    E2ETEST = "E2ETest"


@dataclass
class Feature:
    """The dataclass of feature."""

    name: str
    description: str
    state: FeatureState
    component: Optional[str] = "executor"


def get_feature_list():
    feature_list = [
        Feature(
            name="ActivateConfig",
            description="Bypass node execution when the node does not meet activate condition.",
            state=FeatureState.READY,
        ),
        Feature(
            name="Image",
            description="Support image input and output.",
            state=FeatureState.READY,
        ),
        Feature(
            name="EnvironmentVariablesInYaml",
            description="Support environment variables in flow.dag.yaml.",
            state=FeatureState.READY,
        ),
    ]

    return feature_list
0
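A one-liner against the module above: list the features that are currently in the READY state.

from promptflow._utils.feature_utils import FeatureState, get_feature_list

ready = [f.name for f in get_feature_list() if f.state == FeatureState.READY]
print(ready)  # ['ActivateConfig', 'Image', 'EnvironmentVariablesInYaml']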
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/flow_utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import hashlib
import os
from os import PathLike
from pathlib import Path
from typing import Union

from promptflow._sdk._constants import DAG_FILE_NAME, DEFAULT_ENCODING
from promptflow._utils.logger_utils import LoggerFactory
from promptflow._utils.yaml_utils import dump_yaml, load_yaml

logger = LoggerFactory.get_logger(name=__name__)


def get_flow_lineage_id(flow_dir: Union[str, PathLike]):
    """
    Get the lineage id for a flow. The flow lineage id will be the same for the same flow in the same git repo
    or on the same device.

    If the flow is located in a git repo: use the repo name plus the relative path to flow_dir as the session id.
    Otherwise: use the device id plus the absolute path to flow_dir as the session id.

    :param flow_dir: flow directory
    """
    flow_dir = Path(flow_dir).resolve()
    if not flow_dir.is_dir():
        flow_dir = flow_dir.parent
    try:
        from git import Repo

        repo = Repo(flow_dir, search_parent_directories=True)
        lineage_id = f"{os.path.basename(repo.working_dir)}/{flow_dir.relative_to(repo.working_dir).as_posix()}"
        logger.debug("Got lineage id %s from git repo.", lineage_id)
    except Exception:
        # Failed to get the repo; use device id + absolute path to flow_dir as the session id.
        import uuid

        device_id = uuid.getnode()
        lineage_id = f"{device_id}/{flow_dir.absolute().as_posix()}"
        logger.debug("Got lineage id %s from local since failed to get git info.", lineage_id)

    # Hash the value to avoid it getting too long; it's not user visible.
    lineage_id = hashlib.sha256(lineage_id.encode()).hexdigest()
    return lineage_id


def resolve_flow_path(flow_path: Path):
    """Resolve the given flow path to the dag file path."""
    if flow_path.is_dir():
        flow_path = flow_path / DAG_FILE_NAME
    return flow_path


def load_flow_dag(flow_path: Path):
    """Load the flow dag from the given flow path."""
    flow_path = resolve_flow_path(flow_path)
    if not flow_path.exists():
        raise FileNotFoundError(f"Flow file {flow_path} not found")
    with open(flow_path, "r", encoding=DEFAULT_ENCODING) as f:
        flow_dag = load_yaml(f)
    return flow_path, flow_dag


def dump_flow_dag(flow_dag: dict, flow_path: Path):
    """Dump the flow dag to the given flow path."""
    flow_path = resolve_flow_path(flow_path)
    with open(flow_path, "w", encoding=DEFAULT_ENCODING) as f:
        dump_yaml(flow_dag, f)
    return flow_path
0
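A hedged usage sketch for the flow helpers above; the folder name is hypothetical and load_flow_dag expects it to contain a flow.dag.yaml.

from pathlib import Path

from promptflow._utils.flow_utils import dump_flow_dag, get_flow_lineage_id, load_flow_dag

flow_dir = Path("my_flow")  # hypothetical flow folder
print(get_flow_lineage_id(flow_dir))  # a 64-character sha256 hex digest

flow_path, dag = load_flow_dag(flow_dir)  # resolves to my_flow/flow.dag.yaml
dag["environment"] = {"python_requirements_txt": "requirements.txt"}
dump_flow_dag(dag, flow_dir)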
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/__init__.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/load_data.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import logging
import os
from pathlib import Path
from typing import Any, Dict, List, Tuple, Union

from promptflow.exceptions import ErrorTarget, UserErrorException

module_logger = logging.getLogger(__name__)


def _pd_read_file(local_path: str, logger: logging.Logger = None, max_rows_count: int = None) -> "DataFrame":
    import pandas as pd

    local_path = str(local_path)

    # if the file is empty, return an empty DataFrame directly
    if (
        os.path.getsize(local_path) == 0
    ):  # CodeQL [SM01305] Safe use per local_path is set by PRT service not by end user
        return pd.DataFrame()

    # Load different file formats.
    # Set dtype to object to avoid auto type conversion; the executor will apply type conversion based on the
    # flow definition, so skipping conversion here is acceptable.
    # Note that for the csv and tsv formats this makes integer and float columns strings;
    # for the rest, integers stay int and floats stay float.
    dtype = object
    if local_path.endswith(".csv"):
        df = pd.read_csv(local_path, dtype=dtype, keep_default_na=False, nrows=max_rows_count)
    elif local_path.endswith(".json"):
        df = pd.read_json(local_path, dtype=dtype)
    elif local_path.endswith(".jsonl"):
        df = pd.read_json(local_path, dtype=dtype, lines=True, nrows=max_rows_count)
    elif local_path.endswith(".tsv"):
        df = pd.read_table(local_path, dtype=dtype, keep_default_na=False, nrows=max_rows_count)
    elif local_path.endswith(".parquet"):
        df = pd.read_parquet(local_path)  # read_parquet has no parameter dtype
    else:
        # Parse the file as jsonl when the extension is not known (including unavailable);
        # ignore it and log a warning if loading the file content fails.
        try:
            df = pd.read_json(local_path, dtype=dtype, lines=True, nrows=max_rows_count)
        except:  # noqa: E722
            if logger is None:
                logger = module_logger
            logger.warning(
                f"File {Path(local_path).name} is not a supported format: "
                f"csv, tsv, json, jsonl, parquet. Ignoring it."
            )
            return pd.DataFrame()
    return df


def _bfs_dir(dir_path: List[str]) -> Tuple[List[str], List[str]]:
    """BFS traverse directory with depth 1, returns files and directories"""
    files, dirs = [], []
    for path in dir_path:
        for filename in os.listdir(path):
            file = Path(path, filename).resolve()
            if file.is_file():
                files.append(str(file))
            else:
                dirs.append(str(file))
    return files, dirs


def _handle_dir(dir_path: str, max_rows_count: int, logger: logging.Logger = None) -> "DataFrame":
    """load data from directory"""
    import pandas as pd

    df = pd.DataFrame()
    # BFS traverse the directory to collect files to load
    target_dir = [str(dir_path)]
    while len(target_dir) > 0:
        files, dirs = _bfs_dir(target_dir)
        for file in files:
            current_df = _pd_read_file(file, logger=logger, max_rows_count=max_rows_count)
            df = pd.concat([df, current_df])
            length = len(df)
            if max_rows_count and length >= max_rows_count:
                df = df.head(max_rows_count)
                return df
        # no readable data in the current level, dive into the next level
        target_dir = dirs
    return df


def load_data(
    local_path: Union[str, Path], *, logger: logging.Logger = None, max_rows_count: int = None
) -> List[Dict[str, Any]]:
    """load data from local file"""
    df = load_df(local_path, logger, max_rows_count=max_rows_count)
    # convert the dataframe to a list of dicts
    result = []
    for _, row in df.iterrows():
        result.append(row.to_dict())
    return result


def load_df(local_path: Union[str, Path], logger: logging.Logger = None, max_rows_count: int = None) -> "DataFrame":
    """Load data from a local file to a DataFrame. For the usage of PRS."""
    lp = local_path if isinstance(local_path, Path) else Path(local_path)
    try:
        if lp.is_file():
            df = _pd_read_file(local_path, logger=logger, max_rows_count=max_rows_count)
            # honor max_rows_count if it is specified
            if max_rows_count and len(df) > max_rows_count:
                df = df.head(max_rows_count)
        else:
            df = _handle_dir(local_path, max_rows_count=max_rows_count, logger=logger)
    except ValueError as e:
        raise InvalidUserData(
            message_format="Fail to load invalid data. We support file formats: csv, tsv, json, jsonl, parquet. "
            "Please check input data."
        ) from e
    return df


class InvalidUserData(UserErrorException):
    def __init__(self, **kwargs):
        super().__init__(target=ErrorTarget.RUNTIME, **kwargs)
0
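A usage sketch for load_data above: write a small .jsonl file to a temporary folder and read it back; the file name and contents are made up.

import tempfile
from pathlib import Path

from promptflow._utils.load_data import load_data

with tempfile.TemporaryDirectory() as tmp_dir:
    data_file = Path(tmp_dir) / "data.jsonl"
    data_file.write_text('{"question": "q0"}\n{"question": "q1"}\n')
    print(load_data(data_file, max_rows_count=1))
    # [{'question': 'q0'}]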
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/async_utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import asyncio
from concurrent.futures import ThreadPoolExecutor


def _has_running_loop() -> bool:
    """Check if the current thread has a running event loop."""
    # When using asyncio.get_running_loop(), a RuntimeError is raised if there is no running event loop.
    # So, we use a try-catch block to determine whether there is currently an event loop in place.
    #
    # Note that this is the only way to check whether there is a running loop now, see:
    # https://docs.python.org/3/library/asyncio-eventloop.html?highlight=get_running_loop#asyncio.get_running_loop
    try:
        asyncio.get_running_loop()
        return True
    except RuntimeError:
        return False


def async_run_allowing_running_loop(async_func, *args, **kwargs):
    """Run an async function in a new thread, allowing the current thread to have a running event loop.

    When run in an async environment (e.g., in a notebook), because each thread allows only one event loop,
    using asyncio.run directly leads to a RuntimeError ("asyncio.run() cannot be called from a running event loop").

    To address this issue, we add a check for the event loop here. If the current thread already has an event
    loop, we run the async function in a new thread; otherwise, we run it in the current thread.
    """
    if _has_running_loop():
        with ThreadPoolExecutor(1) as executor:
            return executor.submit(lambda: asyncio.run(async_func(*args, **kwargs))).result()
    else:
        return asyncio.run(async_func(*args, **kwargs))
0
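A minimal usage sketch: the same coroutine can be driven from a plain script or from an environment that already runs an event loop (e.g. a notebook).

import asyncio

from promptflow._utils.async_utils import async_run_allowing_running_loop


async def add(a, b):
    await asyncio.sleep(0)
    return a + b


print(async_run_allowing_running_loop(add, 1, 2))  # 3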
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/multimedia_utils.py
import base64
import os
import re
import uuid
from functools import partial
from pathlib import Path
from typing import Any, Callable, Dict
from urllib.parse import urlparse

import requests

from promptflow._utils._errors import InvalidImageInput, LoadMultimediaDataError
from promptflow.contracts.flow import FlowInputDefinition
from promptflow.contracts.multimedia import Image, PFBytes
from promptflow.contracts.tool import ValueType
from promptflow.exceptions import ErrorTarget

MIME_PATTERN = re.compile(r"^data:image/(.*);(path|base64|url)$")


def _get_extension_from_mime_type(mime_type: str):
    ext = mime_type.split("/")[-1]
    if ext == "*":
        return None
    return ext


def is_multimedia_dict(multimedia_dict: dict):
    if len(multimedia_dict) != 1:
        return False
    key = list(multimedia_dict.keys())[0]
    if re.match(MIME_PATTERN, key):
        return True
    return False


def _get_multimedia_info(key: str):
    match = re.match(MIME_PATTERN, key)
    if match:
        return match.group(1), match.group(2)
    return None, None


def _is_url(value: str):
    try:
        result = urlparse(value)
        return all([result.scheme, result.netloc])
    except ValueError:
        return False


def _is_base64(value: str):
    base64_regex = re.compile(r"^([A-Za-z0-9+/]{4})*(([A-Za-z0-9+/]{2})*(==|[A-Za-z0-9+/]=)?)?$")
    if re.match(base64_regex, value):
        return True
    return False


def _create_image_from_file(f: Path, mime_type: str = None):
    with open(f, "rb") as fin:
        return Image(fin.read(), mime_type=mime_type)


def _create_image_from_base64(base64_str: str, mime_type: str = None):
    image_bytes = base64.b64decode(base64_str)
    return Image(image_bytes, mime_type=mime_type)


def _create_image_from_url(url: str, mime_type: str = None):
    response = requests.get(url)
    if response.status_code == 200:
        return Image(response.content, mime_type=mime_type, source_url=url)
    else:
        raise InvalidImageInput(
            message_format="Failed to fetch image from URL: {url}. Error code: {error_code}. "
            "Error message: {error_message}.",
            target=ErrorTarget.EXECUTOR,
            url=url,
            error_code=response.status_code,
            error_message=response.text,
        )


def _create_image_from_dict(image_dict: dict):
    for k, v in image_dict.items():
        format, resource = _get_multimedia_info(k)
        if resource == "path":
            return _create_image_from_file(Path(v), mime_type=f"image/{format}")
        elif resource == "base64":
            if _is_base64(v):
                return _create_image_from_base64(v, mime_type=f"image/{format}")
            else:
                raise InvalidImageInput(
                    message_format=f"Invalid base64 image: {v}.",
                    target=ErrorTarget.EXECUTOR,
                )
        elif resource == "url":
            return _create_image_from_url(v, mime_type=f"image/{format}")
        else:
            raise InvalidImageInput(
                message_format=f"Unsupported image resource: {resource}. "
                "Supported Resources are [path, base64, url].",
                target=ErrorTarget.EXECUTOR,
            )


def _create_image_from_string(value: str):
    if _is_base64(value):
        return _create_image_from_base64(value)
    elif _is_url(value):
        return _create_image_from_url(value)
    else:
        return _create_image_from_file(Path(value))


def create_image(value: any):
    if isinstance(value, PFBytes):
        return value
    elif isinstance(value, dict):
        if is_multimedia_dict(value):
            return _create_image_from_dict(value)
        else:
            raise InvalidImageInput(
                message_format="Invalid image input format. The image input should be a dictionary like: "
                "{{data:image/<image_type>;[path|base64|url]: <image_data>}}.",
                target=ErrorTarget.EXECUTOR,
            )
    elif isinstance(value, str):
        if not value:
            raise InvalidImageInput(
                message_format="The image input should not be empty.", target=ErrorTarget.EXECUTOR
            )
        return _create_image_from_string(value)
    else:
        raise InvalidImageInput(
            message_format=f"Unsupported image input type: {type(value)}. "
            "The image inputs should be a string or a dictionary.",
            target=ErrorTarget.EXECUTOR,
        )


def _save_image_to_file(
    image: Image, file_name: str, folder_path: Path, relative_path: Path = None, use_absolute_path=False
):
    ext = _get_extension_from_mime_type(image._mime_type)
    file_name = f"{file_name}.{ext}" if ext else file_name
    image_path = (relative_path / file_name).as_posix() if relative_path else file_name
    if use_absolute_path:
        image_path = Path(folder_path / image_path).resolve().as_posix()
    image_reference = {f"data:{image._mime_type};path": image_path}
    path = folder_path / relative_path if relative_path else folder_path
    os.makedirs(path, exist_ok=True)
    with open(os.path.join(path, file_name), "wb") as file:
        file.write(image)
    return image_reference


def get_file_reference_encoder(folder_path: Path, relative_path: Path = None, *, use_absolute_path=False) -> Callable:
    def pfbytes_file_reference_encoder(obj):
        """Dumps PFBytes to a file and returns its reference."""
        if obj.source_url:
            return {f"data:{obj._mime_type};url": obj.source_url}
        if isinstance(obj, PFBytes):
            file_name = str(uuid.uuid4())
            # If use_absolute_path is True, the image file path in the image dictionary will be an absolute path.
            return _save_image_to_file(obj, file_name, folder_path, relative_path, use_absolute_path)
        raise TypeError(f"Not supported to dump type '{type(obj).__name__}'.")

    return pfbytes_file_reference_encoder


def default_json_encoder(obj):
    if isinstance(obj, PFBytes):
        return str(obj)
    else:
        raise TypeError(f"Object of type {type(obj).__name__} is not JSON serializable")


def persist_multimedia_data(value: Any, base_dir: Path, sub_dir: Path = None):
    pfbytes_file_reference_encoder = get_file_reference_encoder(base_dir, sub_dir)
    serialization_funcs = {Image: partial(Image.serialize, **{"encoder": pfbytes_file_reference_encoder})}
    return _process_recursively(value, process_funcs=serialization_funcs)


def convert_multimedia_data_to_base64(value: Any, with_type=False, dict_type=False):
    to_base64_funcs = {PFBytes: partial(PFBytes.to_base64, **{"with_type": with_type, "dict_type": dict_type})}
    return _process_recursively(value, process_funcs=to_base64_funcs)


# TODO: Move this function to a more general place and integrate serialization into this function.
def _process_recursively(value: Any, process_funcs: Dict[type, Callable] = None, inplace: bool = False) -> dict:
    if process_funcs:
        for cls, f in process_funcs.items():
            if isinstance(value, cls):
                return f(value)
    if isinstance(value, list):
        if inplace:
            for i in range(len(value)):
                value[i] = _process_recursively(value[i], process_funcs, inplace)
        else:
            return [_process_recursively(v, process_funcs, inplace) for v in value]
    elif isinstance(value, dict):
        if inplace:
            for k, v in value.items():
                value[k] = _process_recursively(v, process_funcs, inplace)
        else:
            return {k: _process_recursively(v, process_funcs, inplace) for k, v in value.items()}
    return value


def load_multimedia_data(inputs: Dict[str, FlowInputDefinition], line_inputs: dict):
    updated_inputs = dict(line_inputs or {})
    for key, value in inputs.items():
        try:
            if value.type == ValueType.IMAGE:
                if isinstance(updated_inputs[key], list):
                    # For aggregation node, the image input is a list.
                    updated_inputs[key] = [create_image(item) for item in updated_inputs[key]]
                else:
                    updated_inputs[key] = create_image(updated_inputs[key])
            elif value.type == ValueType.LIST or value.type == ValueType.OBJECT:
                updated_inputs[key] = load_multimedia_data_recursively(updated_inputs[key])
        except Exception as ex:
            error_type_and_message = f"({ex.__class__.__name__}) {ex}"
            raise LoadMultimediaDataError(
                message_format="Failed to load image for input '{key}': {error_type_and_message}",
                key=key,
                error_type_and_message=error_type_and_message,
                target=ErrorTarget.EXECUTOR,
            ) from ex
    return updated_inputs


def load_multimedia_data_recursively(value: Any):
    return _process_multimedia_dict_recursively(value, _create_image_from_dict)


def resolve_multimedia_data_recursively(input_dir: Path, value: Any):
    process_func = partial(resolve_image_path, **{"input_dir": input_dir})
    return _process_multimedia_dict_recursively(value, process_func)


def _process_multimedia_dict_recursively(value: Any, process_func: Callable) -> dict:
    if isinstance(value, list):
        return [_process_multimedia_dict_recursively(item, process_func) for item in value]
    elif isinstance(value, dict):
        if is_multimedia_dict(value):
            return process_func(**{"image_dict": value})
        else:
            return {k: _process_multimedia_dict_recursively(v, process_func) for k, v in value.items()}
    else:
        return value


def resolve_image_path(input_dir: Path, image_dict: dict):
    """Resolve image path to absolute path in image dict"""
    input_dir = input_dir.parent if input_dir.is_file() else input_dir
    if is_multimedia_dict(image_dict):
        for key in image_dict:
            _, resource = _get_multimedia_info(key)
            if resource == "path":
                image_dict[key] = str(input_dir / image_dict[key])
    return image_dict
0
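An illustrative round trip through the module above; logo.jpg is a hypothetical local file and the output folder is made up.

from pathlib import Path

from promptflow._utils.multimedia_utils import create_image, persist_multimedia_data

image = create_image({"data:image/jpg;path": "logo.jpg"})  # assumes logo.jpg exists locally
value = {"answer": "ok", "picture": image}
persisted = persist_multimedia_data(value, base_dir=Path("./output"))
# persisted["picture"] becomes a path-style reference such as {"data:image/jpg;path": "<uuid>.jpg"},
# and the image bytes are written under ./output.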
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/dataclass_serializer.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from dataclasses import fields, is_dataclass
from datetime import datetime
from enum import Enum
from typing import Any, Callable, Dict, List, Type, TypeVar

from promptflow._core.generator_proxy import GeneratorProxy
from promptflow.contracts.tool import ConnectionType

T = TypeVar("T")


def get_type(obj: type):
    if is_dataclass(obj):
        return obj
    if isinstance(obj, list):
        return List[get_type(obj[0])]
    if isinstance(obj, dict):
        return Dict[str, get_type(obj[list(obj.keys())[0]])]
    return obj


def deserialize_dataclass(cls: Type[T], data: dict) -> T:
    if not is_dataclass(cls):
        raise ValueError(f"{cls} is not a dataclass")
    if not isinstance(data, dict):
        raise ValueError(f"{data} is not a dict")
    kwargs = {}
    for field in fields(cls):
        if field.name not in data:
            kwargs[field.name] = field.default
            continue
        field_type = get_type(field.type)
        kwargs[field.name] = deserialize_value(data[field.name], field_type)
    return cls(**kwargs)


def deserialize_value(obj, field_type):
    if not isinstance(field_type, type):
        return obj
    if is_dataclass(field_type):
        return deserialize_dataclass(field_type, obj)
    if issubclass(field_type, Enum):
        return field_type(obj)
    if issubclass(field_type, datetime) and obj is not None:
        # Remove the Z/z at the end of the string.
        if obj.endswith("Z") or obj.endswith("z"):
            return datetime.fromisoformat(obj[:-1])
        return datetime.fromisoformat(obj)
    return obj


def serialize(value: object, remove_null: bool = False, serialization_funcs: Dict[type, Callable] = None) -> dict:
    if serialization_funcs:
        for cls, f in serialization_funcs.items():
            if isinstance(value, cls):
                return f(value)
    if isinstance(value, datetime):
        return value.isoformat() + "Z"
    if isinstance(value, Enum):
        return value.value
    if isinstance(value, list):
        return [serialize(v, remove_null, serialization_funcs) for v in value]
    if isinstance(value, GeneratorProxy):
        # TODO: The current implementation of the serialize function is not self-explanatory, as value.items is
        # mutable whereas the serialize function should deal with a fixed object. We should rename the function
        # to to_serializable to better reflect its purpose.
        return value.items
    # Note that the custom connection check should come before the dict check.
    if ConnectionType.is_connection_value(value):
        return ConnectionType.serialize_conn(value)
    if isinstance(value, dict):
        return {k: serialize(v, remove_null, serialization_funcs) for k, v in value.items()}
    if is_dataclass(value):
        if hasattr(value, "serialize"):
            result = value.serialize()
        else:
            result = {
                f.name: serialize(getattr(value, f.name), remove_null, serialization_funcs) for f in fields(value)
            }
        if not remove_null:
            return result
        null_keys = [k for k, v in result.items() if v is None]
        for k in null_keys:
            result.pop(k)
        return result
    try:
        from pydantic import BaseModel

        if isinstance(value, BaseModel):  # Handle pydantic model, which is used in langchain
            return value.dict()
    except ImportError:
        # Ignore ImportError if pydantic is not installed
        pass
    return value


def assertEqual(a: dict, b: dict, path: str = ""):
    if isinstance(a, dict):
        assert isinstance(b, dict), f"{path}: {type(a)} != {type(b)}"
        assert set(a.keys()) == set(b.keys()), f"{path}: {set(a.keys())} != {set(b.keys())}"
        for key in a.keys():
            assertEqual(a[key], b[key], path + "." + key)
    elif isinstance(a, list):
        assert isinstance(b, list), f"{path}: {type(a)} != {type(b)}"
        assert len(a) == len(b), f"{path}: {len(a)} != {len(b)}"
        for i in range(len(a)):
            assertEqual(a[i], b[i], path + f"[{i}]")
    else:
        assert a == b, f"{path}: {a} != {b}"


def convert_eager_flow_output_to_dict(value: Any):
    """
    Convert the output of an eager flow to a dict. Since the output of an eager flow may not be a dict,
    we need to convert it to a dict in batch mode.

    Examples:
    1. If the output is a dict, return it directly:
        value = {"output": 1} -> {"output": 1}
    2. If the output is a dataclass, convert it to a dict:
        value = SampleDataClass(output=1) -> {"output": 1}
    3. If the output is not a dict or dataclass, convert it to a dict by adding a key "output":
        value = 1 -> {"output": 1}
    """
    if isinstance(value, dict):
        return value
    elif is_dataclass(value):
        return {f.name: getattr(value, f.name) for f in fields(value)}
    else:
        return {"output": value}
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/yaml_utils.py
from io import StringIO from os import PathLike from typing import IO, AnyStr, Dict, Optional, Union from ruamel.yaml import YAML, YAMLError from promptflow._constants import DEFAULT_ENCODING from promptflow._utils._errors import YamlParseError def load_yaml(source: Optional[Union[AnyStr, PathLike, IO]]) -> Dict: # null check - just return an empty dict. # Certain CLI commands rely on this behavior to produce a resource # via CLI, which is then populated through CLArgs. """Load a local YAML file or a readable stream object. .. note:: 1. For a local file yaml .. code-block:: python yaml_path = "path/to/yaml" content = load_yaml(yaml_path) 2. For a readable stream object .. code-block:: python with open("path/to/yaml", "r", encoding="utf-8") as f: content = load_yaml(f) :param source: The relative or absolute path to the local file, or a readable stream object. :type source: str :return: A dictionary representation of the local file's contents. :rtype: Dict """ if source is None: return {} # pylint: disable=redefined-builtin input = None must_open_file = False try: # check source type by duck-typing it as an IOBase readable = source.readable() if not readable: # source is misformatted stream or file msg = "File Permissions Error: The already-open \n\n inputted file is not readable." raise Exception(msg) # source is an already-open stream or file, we can read() from it directly. input = source except AttributeError: # source has no writable() function, assume it's a string or file path. must_open_file = True if must_open_file: # If supplied a file path, open it. try: input = open(source, "r", encoding=DEFAULT_ENCODING) except OSError: # FileNotFoundError introduced in Python 3 msg = "No such file or directory: {}" raise Exception(msg.format(source)) # input should now be a readable file or stream. Parse it. cfg = {} try: yaml = YAML() yaml.preserve_quotes = True cfg = yaml.load(input) except YAMLError as e: msg = f"Error while parsing yaml file: {source} \n\n {str(e)}" raise Exception(msg) finally: if must_open_file: input.close() return cfg def load_yaml_string(yaml_string: str): """Load a yaml string. .. code-block:: python yaml_string = "some yaml string" object = load_yaml_string(yaml_string) :param yaml_string: A yaml string. :type yaml_string: str """ yaml = YAML() yaml.preserve_quotes = True return yaml.load(yaml_string) def dump_yaml(*args, **kwargs): """Dump data to a yaml string or stream. .. note:: 1. Dump to a yaml string .. code-block:: python data = {"key": "value"} yaml_string = dump_yaml(data) 2. Dump to a stream .. code-block:: python data = {"key": "value"} with open("path/to/yaml", "w", encoding="utf-8") as f: dump_yaml(data, f) """ yaml = YAML() yaml.default_flow_style = False # when using with no stream parameter but just the data, dump to yaml string and return if len(args) == 1: string_stream = StringIO() yaml.dump(args[0], string_stream, **kwargs) output_string = string_stream.getvalue() string_stream.close() return output_string # when using with stream parameter, dump to stream. e.g.: # open('test.yaml', 'w', encoding='utf-8') as f: # dump_yaml(data, f) elif len(args) == 2: return yaml.dump(*args, **kwargs) else: raise YamlParseError("Only 1 or 2 positional arguments are allowed for dump yaml util function.")
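A short usage sketch of the YAML helpers above; the `flow.dag.yaml` file name is only an example, and ruamel.yaml must be installed:

```python
from promptflow._utils.yaml_utils import dump_yaml, load_yaml, load_yaml_string

data = {"name": "my_flow", "inputs": {"question": {"type": "string"}}}

# With a single positional argument, dump_yaml returns the YAML text as a string.
yaml_text = dump_yaml(data)
print(load_yaml_string(yaml_text)["name"])  # -> my_flow

# With a stream as the second argument, dump_yaml writes to it instead.
with open("flow.dag.yaml", "w", encoding="utf-8") as f:
    dump_yaml(data, f)

# load_yaml accepts a path or an already-open readable stream; None yields an empty dict.
print(load_yaml("flow.dag.yaml")["inputs"])
assert load_yaml(None) == {}
```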
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/connection_utils.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import io import re from jinja2 import Template from .yaml_utils import dump_yaml, load_yaml_string def generate_custom_strong_type_connection_spec(cls, package, package_version): connection_spec = { "connectionCategory": "CustomKeys", "flowValueType": "CustomConnection", "connectionType": cls.__name__, "ConnectionTypeDisplayName": cls.__name__, "configSpecs": [], "module": cls.__module__, "package": package, "package_version": package_version, } for k, typ in cls.__annotations__.items(): spec = { "name": k, "displayName": k.replace("_", " ").title(), "configValueType": typ.__name__, } if hasattr(cls, k): spec["isOptional"] = getattr(cls, k, None) is not None else: spec["isOptional"] = False connection_spec["configSpecs"].append(spec) return connection_spec def generate_custom_strong_type_connection_template(cls, connection_spec, package, package_version): connection_template_str = """ $schema: https://azuremlschemas.azureedge.net/promptflow/latest/CustomStrongTypeConnection.schema.json name: "to_replace_with_connection_name" type: custom custom_type: {{ custom_type }} module: {{ module }} package: {{ package }} package_version: {{ package_version }} configs:{% for key, value in configs.items() %} {{ key }}: "{{ value -}}"{% endfor %} secrets: # must-have{% for key, value in secrets.items() %} {{ key }}: "{{ value -}}"{% endfor %} """ connection_template = Template(connection_template_str) # Extract configs and secrets configs = {} secrets = {} for spec in connection_spec["configSpecs"]: if spec["configValueType"] == "Secret": secrets[spec["name"]] = "to_replace_with_" + spec["name"].replace("-", "_") else: configs[spec["name"]] = getattr(cls, spec["name"], None) or "to_replace_with_" + spec["name"].replace( "-", "_" ) # Prepare data for template data = { "custom_type": cls.__name__, "module": cls.__module__, "package": package, "package_version": package_version, "configs": configs, "secrets": secrets, } connection_template_with_data = connection_template.render(data) connection_template_with_comments = render_comments( connection_template_with_data, cls, secrets.keys(), configs.keys() ) return connection_template_with_comments def render_comments(connection_template, cls, secrets, configs): if cls.__doc__ is not None: data = load_yaml_string(connection_template) comments_map = extract_comments_mapping(list(secrets) + list(configs), cls.__doc__) # Add comments for secret keys for key in secrets: if key in comments_map.keys(): data["secrets"].yaml_add_eol_comment(comments_map[key] + "\n", key) # Add comments for config keys for key in configs: if key in comments_map.keys(): data["configs"].yaml_add_eol_comment(comments_map[key] + "\n", key) # Dump data object back to string buf = io.StringIO() dump_yaml(data, buf) connection_template_with_comments = buf.getvalue() return connection_template_with_comments return connection_template def extract_comments_mapping(keys, doc): comments_map = {} for key in keys: try: param_pattern = rf":param {key}: (.*)" key_description = " ".join(re.findall(param_pattern, doc)) type_pattern = rf":type {key}: (.*)" key_type = " ".join(re.findall(type_pattern, doc)).rstrip(".") if key_type and key_description: comments_map[key] = " ".join([key_type + " type.", key_description]) elif key_type: comments_map[key] = key_type + " type." 
            elif key_description:
                comments_map[key] = key_description
        except re.error:
            print("An error occurred when extracting comments mapping.")

    return comments_map
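For illustration, a hedged sketch of the two generators above. `MyCustomConnection` and the `Secret` stand-in are invented for this example; a real connection class would use promptflow's own `Secret` type, but the spec generator only looks at the annotation's class name:

```python
from promptflow._utils.connection_utils import (
    generate_custom_strong_type_connection_spec,
    generate_custom_strong_type_connection_template,
)


class Secret(str):
    """Stand-in secret type for this example only."""


class MyCustomConnection:
    """My custom connection.

    :param api_key: The api key.
    :type api_key: Secret
    :param api_base: The service endpoint.
    :type api_base: String
    """

    api_key: Secret
    api_base: str = "https://example.endpoint"


spec = generate_custom_strong_type_connection_spec(MyCustomConnection, "my-package", "0.0.1")
print([(s["name"], s["configValueType"]) for s in spec["configSpecs"]])
# -> [('api_key', 'Secret'), ('api_base', 'str')]

# The YAML template contains to_replace_with_* placeholders plus comments derived from the docstring.
print(generate_custom_strong_type_connection_template(MyCustomConnection, spec, "my-package", "0.0.1"))
```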
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/_errors.py
from promptflow.exceptions import SystemErrorException, UserErrorException, ValidationException class InvalidImageInput(ValidationException): pass class LoadMultimediaDataError(UserErrorException): pass class YamlParseError(SystemErrorException): """Exception raised when yaml parse failed.""" pass
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/logger_utils.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- # This file is for open source, # so it should not contain any dependency on azure or azureml-related packages. import json import logging import os import sys from contextvars import ContextVar from dataclasses import dataclass from functools import partial from typing import List, Optional from promptflow._constants import PF_LOGGING_LEVEL from promptflow._utils.credential_scrubber import CredentialScrubber from promptflow._utils.exception_utils import ExceptionPresenter from promptflow.contracts.run_mode import RunMode # The maximum length of logger name is 18 ("promptflow-runtime"). # The maximum digit length of process id is 5. Fix the field width to 7. # So fix the length of these fields in the formatter. # May need to change if logger name/process id length changes. LOG_FORMAT = "%(asctime)s %(process)7d %(name)-18s %(levelname)-8s %(message)s" DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S %z" class CredentialScrubberFormatter(logging.Formatter): """Formatter that scrubs credentials in logs.""" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._default_scrubber = CredentialScrubber() self._context_var = ContextVar("credential_scrubber", default=None) @property def credential_scrubber(self): credential_scrubber = self._context_var.get() if credential_scrubber: return credential_scrubber return self._default_scrubber def set_credential_list(self, credential_list: List[str]): """Set credential list, which will be scrubbed in logs.""" credential_scrubber = CredentialScrubber() for c in credential_list: credential_scrubber.add_str(c) self._context_var.set(credential_scrubber) def clear(self): """Clear context variable.""" self._context_var.set(None) def format(self, record): """Override logging.Formatter's format method and remove credentials from log.""" s: str = super().format(record) s = self._handle_traceback(s, record) s = self._handle_customer_content(s, record) return self.credential_scrubber.scrub(s) def _handle_customer_content(self, s: str, record: logging.LogRecord) -> str: """Handle customer content in log message. Derived class can override this method to handle customer content in log. """ # If log record does not have "customer_content" field, return input logging string directly. if not hasattr(record, "customer_content"): return s customer_content = record.customer_content if isinstance(customer_content, Exception): # If customer_content is an exception, convert it to string. customer_str = self._convert_exception_to_str(customer_content) elif isinstance(customer_content, str): customer_str = customer_content else: customer_str = str(customer_content) return s.replace("{customer_content}", customer_str) def _handle_traceback(self, s: str, record: logging.LogRecord) -> str: """Interface method for handling traceback in log message. Derived class can override this method to handle traceback in log. 
""" return s def _convert_exception_to_str(self, ex: Exception) -> str: """Convert exception a user-friendly string.""" try: return json.dumps(ExceptionPresenter.create(ex).to_dict(include_debug_info=True), indent=2) except: # noqa: E722 return str(ex) class FileHandler: """Write compliant log to a file.""" def __init__(self, file_path: str, formatter: Optional[logging.Formatter] = None): self._stream_handler = self._get_stream_handler(file_path) if formatter is None: # Default formatter to scrub credentials in log message, exception and stack trace. self._formatter = CredentialScrubberFormatter(fmt=LOG_FORMAT, datefmt=DATETIME_FORMAT) else: self._formatter = formatter self._stream_handler.setFormatter(self._formatter) def set_credential_list(self, credential_list: List[str]): """Set credential list, which will be scrubbed in logs.""" self._formatter.set_credential_list(credential_list) def emit(self, record: logging.LogRecord): """Write logs.""" self._stream_handler.emit(record) def close(self): """Close stream handler.""" self._stream_handler.close() self._formatter.clear() def _get_stream_handler(self, file_path) -> logging.StreamHandler: """This method can be overridden by derived class to save log file in cloud.""" return logging.FileHandler(file_path, encoding="UTF-8") class FileHandlerConcurrentWrapper(logging.Handler): """Wrap context-local FileHandler instance for thread safety. A logger instance can write different log to different files in different contexts. """ def __init__(self): super().__init__() self._context_var = ContextVar("handler", default=None) @property def handler(self) -> FileHandler: return self._context_var.get() @handler.setter def handler(self, handler: FileHandler): self._context_var.set(handler) def emit(self, record: logging.LogRecord): """Override logging.Handler's emit method. Get inner file handler in current context and write log. """ stream_handler: FileHandler = self._context_var.get() if stream_handler is None: return stream_handler.emit(record) def clear(self): """Close file handler and clear context variable.""" handler: FileHandler = self._context_var.get() if handler: try: handler.close() except: # NOQA: E722 # Do nothing if handler close failed. pass self._context_var.set(None) valid_logging_level = {"CRITICAL", "FATAL", "ERROR", "WARN", "WARNING", "INFO", "DEBUG", "NOTSET"} def get_pf_logging_level(default=logging.INFO): logging_level = os.environ.get(PF_LOGGING_LEVEL, None) if logging_level not in valid_logging_level: # Fall back to info if user input is invalid. logging_level = default return logging_level def get_logger(name: str) -> logging.Logger: """Get logger used during execution.""" logger = logging.Logger(name) logger.setLevel(get_pf_logging_level()) logger.addHandler(FileHandlerConcurrentWrapper()) stdout_handler = logging.StreamHandler(sys.stdout) stdout_handler.setFormatter(CredentialScrubberFormatter(fmt=LOG_FORMAT, datefmt=DATETIME_FORMAT)) logger.addHandler(stdout_handler) return logger # Logs by flow_logger will only be shown in flow mode. # These logs should contain all detailed logs from executor and runtime. flow_logger = get_logger("execution.flow") # Logs by bulk_logger will only be shown in bulktest and eval modes. # These logs should contain overall progress logs and error logs. bulk_logger = get_logger("execution.bulk") # Logs by logger will be shown in all the modes above, # such as error logs. 
logger = get_logger("execution") logger_contexts = [] @dataclass class LogContext: """A context manager to setup logger context for input_logger, logger, flow_logger and bulk_logger.""" file_path: str # Log file path. run_mode: Optional[RunMode] = RunMode.Test credential_list: Optional[List[str]] = None # These credentials will be scrubbed in logs. input_logger: logging.Logger = None # If set, then context will also be set for input_logger. def get_initializer(self): return partial( LogContext, file_path=self.file_path, run_mode=self.run_mode, credential_list=self.credential_list ) @staticmethod def get_current() -> Optional["LogContext"]: global logger_contexts if logger_contexts: return logger_contexts[-1] return None @staticmethod def set_current(context: "LogContext"): global logger_contexts if isinstance(context, LogContext): logger_contexts.append(context) @staticmethod def clear_current(): global logger_contexts if logger_contexts: logger_contexts.pop() def __enter__(self): self._set_log_path() self._set_credential_list() LogContext.set_current(self) def __exit__(self, *args): """Clear context-local variables.""" all_logger_list = [logger, flow_logger, bulk_logger] if self.input_logger: all_logger_list.append(self.input_logger) for logger_ in all_logger_list: for handler in logger_.handlers: if isinstance(handler, FileHandlerConcurrentWrapper): handler.clear() elif isinstance(handler.formatter, CredentialScrubberFormatter): handler.formatter.clear() LogContext.clear_current() def _set_log_path(self): if not self.file_path: return logger_list = self._get_loggers_to_set_path() for logger_ in logger_list: for log_handler in logger_.handlers: if isinstance(log_handler, FileHandlerConcurrentWrapper): handler = FileHandler(self.file_path) log_handler.handler = handler def _set_credential_list(self): # Set credential list to all loggers. all_logger_list = self._get_execute_loggers_list() if self.input_logger: all_logger_list.append(self.input_logger) credential_list = self.credential_list or [] for logger_ in all_logger_list: for handler in logger_.handlers: if isinstance(handler, FileHandlerConcurrentWrapper) and handler.handler: handler.handler.set_credential_list(credential_list) elif isinstance(handler.formatter, CredentialScrubberFormatter): handler.formatter.set_credential_list(credential_list) def _get_loggers_to_set_path(self) -> List[logging.Logger]: logger_list = [logger] if self.input_logger: logger_list.append(self.input_logger) # For Batch run mode, set log path for bulk_logger, # otherwise for flow_logger. if self.run_mode == RunMode.Batch: logger_list.append(bulk_logger) else: logger_list.append(flow_logger) return logger_list @classmethod def _get_execute_loggers_list(cls) -> List[logging.Logger]: # return all loggers for executor return [logger, flow_logger, bulk_logger] def update_log_path(log_path: str, input_logger: logging.Logger = None): logger_list = [logger, bulk_logger, flow_logger] if input_logger: logger_list.append(input_logger) for logger_ in logger_list: update_single_log_path(log_path, logger_) def update_single_log_path(log_path: str, logger_: logging.Logger): for wrapper in logger_.handlers: if isinstance(wrapper, FileHandlerConcurrentWrapper): handler: FileHandler = wrapper.handler if handler: wrapper.handler = type(handler)(log_path, handler._formatter) def scrub_credentials(s: str): """Scrub credentials in string s. 
For example, for input string: "print accountkey=accountKey", the output will be: "print accountkey=**data_scrubbed**" """ for h in logger.handlers: if isinstance(h, FileHandlerConcurrentWrapper): if h.handler and h.handler._formatter: credential_scrubber = h.handler._formatter.credential_scrubber if credential_scrubber: return credential_scrubber.scrub(s) return CredentialScrubber().scrub(s) class LoggerFactory: @staticmethod def get_logger(name: str, verbosity: int = logging.INFO, target_stdout: bool = False): logger = logging.getLogger(name) logger.propagate = False # Set default logger level to debug, we are using handler level to control log by default logger.setLevel(logging.DEBUG) # Use env var at first, then use verbosity verbosity = get_pf_logging_level(default=None) or verbosity if not LoggerFactory._find_handler(logger, logging.StreamHandler): LoggerFactory._add_handler(logger, verbosity, target_stdout) # TODO: Find a more elegant way to set the logging level for azure.core.pipeline.policies._universal azure_logger = logging.getLogger("azure.core.pipeline.policies._universal") azure_logger.setLevel(logging.DEBUG) LoggerFactory._add_handler(azure_logger, logging.DEBUG, target_stdout) return logger @staticmethod def _find_handler(logger: logging.Logger, handler_type: type) -> Optional[logging.Handler]: for log_handler in logger.handlers: if isinstance(log_handler, handler_type): return log_handler return None @staticmethod def _add_handler(logger: logging.Logger, verbosity: int, target_stdout: bool = False) -> None: # set target_stdout=True can log data into sys.stdout instead of default sys.stderr, in this way # logger info and python print result can be synchronized handler = logging.StreamHandler(stream=sys.stdout) if target_stdout else logging.StreamHandler() formatter = logging.Formatter("[%(asctime)s][%(name)s][%(levelname)s] - %(message)s") handler.setFormatter(formatter) handler.setLevel(verbosity) logger.addHandler(handler) def get_cli_sdk_logger(): """Get logger used by CLI SDK.""" # cli sdk logger default logging level is WARNING # here the logger name "promptflow" is from promptflow._sdk._constants.LOGGER_NAME, # to avoid circular import error, use plain string here instead of importing from _constants # because this function is also called in _prepare_home_dir which is in _constants return LoggerFactory.get_logger("promptflow", verbosity=logging.WARNING)
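A brief, hedged sketch of wiring these loggers up for a test run; the log file name and token are placeholders:

```python
from promptflow._utils.logger_utils import LogContext, flow_logger, scrub_credentials
from promptflow.contracts.run_mode import RunMode

# Within the context, execution loggers also write to the given file and scrub the listed credentials.
with LogContext("execution.log", run_mode=RunMode.Test, credential_list=["my-secret-token"]):
    flow_logger.info("Calling service with my-secret-token")  # token is replaced by **data_scrubbed**

# Outside a context, scrub_credentials falls back to the default patterns (e.g. sig=..., key=...).
print(scrub_credentials("https://storage/blob?sig=abc123"))  # -> ...?sig=**data_scrubbed**
```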
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/retry_utils.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import time from functools import wraps from typing import Tuple, Type, Union from requests import Response from promptflow._utils.logger_utils import LoggerFactory logger = LoggerFactory.get_logger(__name__) def retry(exception_to_check: Union[Type[Exception], Tuple[Type[Exception], ...]], tries=4, delay=3, backoff=2): """ From https://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/ Retry calling the decorated function using an exponential backoff. http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/ original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry :param exception_to_check: the exception to check. may be a tuple of exceptions to check :type exception_to_check: Exception or tuple :param tries: number of times to try (not retry) before giving up :type tries: int :param delay: initial delay between retries in seconds :type delay: int :param backoff: backoff multiplier e.g. value of 2 will double the delay each retry :type backoff: int :param logger: log the retry action if specified :type logger: logging.Logger """ def deco_retry(f): @wraps(f) def f_retry(*args, **kwargs): retry_times, delay_seconds = tries, delay while retry_times > 1: try: logger.debug("Running %s, %d more tries to go.", str(f), retry_times) return f(*args, **kwargs) except exception_to_check: time.sleep(delay_seconds) retry_times -= 1 delay_seconds *= backoff logger.warning("%s, Retrying in %d seconds...", str(exception_to_check), delay_seconds) return f(*args, **kwargs) return f_retry # true decorator return deco_retry HTTP_SAFE_CODES = set(range(506)) - {408, 429, 500, 502, 503, 504} HTTP_RETRY_CODES = set(range(999)) - HTTP_SAFE_CODES def http_retry_wrapper(f, tries=4, delay=3, backoff=2): """ :param f: function to be retried, should return a Response object. :type f: Callable :param tries: number of times to try (not retry) before giving up :type tries: int :param delay: initial delay between retries in seconds :type delay: int :param backoff: backoff multiplier e.g. value of 2 will double the delay each retry :type backoff: int """ @wraps(f) def f_retry(*args, **kwargs): retry_times, delay_seconds = tries, delay while retry_times > 1: result = f(*args, **kwargs) if not isinstance(result, Response): logger.debug(f"Not a retryable function, expected return type {Response}, got {type(result)}.") return result if result.status_code not in HTTP_RETRY_CODES: return result logger.warning( f"Retryable error code {result.status_code} returned, retrying in {delay_seconds} seconds. " f"Function {f.__name__}, Reason: {result.reason}" ) time.sleep(delay_seconds) retry_times -= 1 delay_seconds *= backoff return f(*args, **kwargs) return f_retry
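Two minimal usage sketches for the retry helpers above; `flaky_fetch` and the health-check URL are made up for the example:

```python
import requests

from promptflow._utils.retry_utils import http_retry_wrapper, retry


@retry(ConnectionError, tries=3, delay=1, backoff=2)
def flaky_fetch():
    # Retried with exponential backoff; the final attempt is allowed to raise.
    return "ok"


# http_retry_wrapper retries only when the wrapped callable returns a Response with a retryable status code.
get_with_retry = http_retry_wrapper(lambda: requests.get("https://example.com/health"), tries=3)
response = get_with_retry()
```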
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/tool_utils.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import importlib import inspect import logging import re from enum import Enum, EnumMeta from typing import Any, Callable, Dict, List, Union, get_args, get_origin from jinja2 import Environment, meta from promptflow._core._errors import DuplicateToolMappingError from promptflow._utils.utils import is_json_serializable from promptflow.exceptions import ErrorTarget, UserErrorException from ..contracts.tool import ConnectionType, InputDefinition, Tool, ToolFuncCallScenario, ValueType from ..contracts.types import PromptTemplate module_logger = logging.getLogger(__name__) _DEPRECATED_TOOLS = "deprecated_tools" def value_to_str(val): if val is inspect.Parameter.empty: # For empty case, default field will be skipped when dumping to json return None if val is None: # Dump default: "" in json to avoid UI validation error return "" if isinstance(val, Enum): return val.value return str(val) def resolve_annotation(anno) -> Union[str, list]: """Resolve the union annotation to type list.""" origin = get_origin(anno) if origin != Union: return anno # Optional[Type] is Union[Type, NoneType], filter NoneType out args = [arg for arg in get_args(anno) if arg != type(None)] # noqa: E721 return args[0] if len(args) == 1 else args def param_to_definition(param, gen_custom_type_conn=False) -> (InputDefinition, bool): default_value = param.default # Get value type and enum from annotation value_type = resolve_annotation(param.annotation) enum = None custom_type_conn = None # Get value type and enum from default if no annotation if default_value is not inspect.Parameter.empty and value_type == inspect.Parameter.empty: value_type = default_value.__class__ if isinstance(default_value, Enum) else type(default_value) # Extract enum for enum class if isinstance(value_type, EnumMeta): enum = [str(option.value) for option in value_type] value_type = str is_connection = False if ConnectionType.is_connection_value(value_type): if ConnectionType.is_custom_strong_type(value_type): typ = ["CustomConnection"] custom_type_conn = [value_type.__name__] else: typ = [value_type.__name__] is_connection = True elif isinstance(value_type, list): if not all(ConnectionType.is_connection_value(t) for t in value_type): typ = [ValueType.OBJECT] else: custom_connection_added = False typ = [] custom_type_conn = [] for t in value_type: # Add 'CustomConnection' to typ list when custom strong type connection exists. Collect all custom types if ConnectionType.is_custom_strong_type(t): if not custom_connection_added: custom_connection_added = True typ.append("CustomConnection") custom_type_conn.append(t.__name__) else: if t.__name__ != "CustomConnection": typ.append(t.__name__) elif not custom_connection_added: custom_connection_added = True typ.append(t.__name__) is_connection = True else: typ = [ValueType.from_type(value_type)] # 1. Do not generate custom type when generating flow.tools.json for script tool. # Extension would show custom type if it exists. While for script tool with custom strong type connection, # we still want to show 'CustomConnection' type. # 2. Generate custom connection type when resolving tool in _tool_resolver, since we rely on it to convert the # custom connection to custom strong type connection. 
if not gen_custom_type_conn: custom_type_conn = None return ( InputDefinition( type=typ, default=value_to_str(default_value), description=None, enum=enum, custom_type=custom_type_conn, ), is_connection, ) def function_to_interface( f: Callable, initialize_inputs=None, gen_custom_type_conn=False, skip_prompt_template=False ) -> tuple: sign = inspect.signature(f) all_inputs = {} input_defs = {} connection_types = [] # Collect all inputs from class and func if initialize_inputs: if any(k for k in initialize_inputs if k in sign.parameters): raise Exception(f'Duplicate inputs found from {f.__name__!r} and "__init__()"!') all_inputs = {**initialize_inputs} enable_kwargs = any([param.kind == inspect.Parameter.VAR_KEYWORD for _, param in sign.parameters.items()]) all_inputs.update( { k: v for k, v in sign.parameters.items() if k != "self" and v.kind != v.VAR_KEYWORD and v.kind != v.VAR_POSITIONAL # TODO: Handle these cases } ) # Resolve inputs to definitions. for k, v in all_inputs.items(): # Get value type from annotation value_type = resolve_annotation(v.annotation) if skip_prompt_template and value_type is PromptTemplate: # custom llm tool has prompt template as input, skip it continue input_def, is_connection = param_to_definition(v, gen_custom_type_conn=gen_custom_type_conn) input_defs[k] = input_def if is_connection: connection_types.append(input_def.type) outputs = {} # Note: We don't have output definition now return input_defs, outputs, connection_types, enable_kwargs def function_to_tool_definition(f: Callable, type=None, initialize_inputs=None) -> Tool: """Translate a function to tool definition. :param f: Function to be translated. :param type: Tool type :param initialize_inputs: The initialize() func inputs get by get_initialize_inputs() when function defined in class. We will merge those inputs with f() inputs. :return: The tool definition. """ if hasattr(f, "__original_function"): f = f.__original_function inputs, outputs, _, _ = function_to_interface(f, initialize_inputs) # Hack to get class name class_name = None if "." in f.__qualname__: class_name = f.__qualname__.replace(f".{f.__name__}", "") meta_dict = { "name": f.__qualname__, "description": inspect.getdoc(f) or None, "inputs": inputs, "outputs": outputs, "class_name": class_name, "function": f.__name__, } return Tool(type=type, module=f.__module__, **meta_dict, is_builtin=True, stage="test") def get_inputs_for_prompt_template(template_str): """Get all input variable names and definitions from a jinja2 template string. 
: param template_str: template string : type t: str : return: the input name to InputDefinition dict : rtype t: Dict[str, ~promptflow.contracts.tool.InputDefinition] Example: >>> get_inputs_for_prompt_template( template_str="A simple prompt with no variables" ) {} >>> get_inputs_for_prompt_template( template_str="Prompt with only one string input {{str_input}}" ) {"str_input": InputDefinition(type=[ValueType.STRING])} >>> get_inputs_for_prompt_template( template_str="Prompt with image input ![image]({{image_input}}) and string input {{str_input}}" ) {"image_input": InputDefinition(type=[ValueType.IMAGE]), "str_input": InputDefinition(type=[ValueType.STRING]) """ env = Environment() template = env.parse(template_str) inputs = sorted(meta.find_undeclared_variables(template), key=lambda x: template_str.find(x)) result_dict = {i: InputDefinition(type=[ValueType.STRING]) for i in inputs} # currently we only support image type pattern = r"\!\[(\s*image\s*)\]\(\{\{\s*([^{}]+)\s*\}\}\)" matches = re.finditer(pattern, template_str) for match in matches: input_name = match.group(2).strip() result_dict[input_name] = InputDefinition([ValueType(match.group(1).strip())]) return result_dict def get_prompt_param_name_from_func(f): """Get the param name of prompt template on provider.""" return next((k for k, annotation in f.__annotations__.items() if annotation == PromptTemplate), None) def validate_dynamic_list_func_response_type(response: Any, f: str): """Verify response type is correct. The response is a list of items. Each item is a dict with the following keys: - value: for backend use. Required. - display_value: for UI display. Optional. - hyperlink: external link. Optional. - description: information icon tip. Optional. The response can not be empty. """ if not response: raise ListFunctionResponseError(f"{f} response can not be empty.") if not isinstance(response, List): raise ListFunctionResponseError(f"{f} response must be a list.") for item in response: if not isinstance(item, Dict): raise ListFunctionResponseError(f"{f} response must be a list of dict. {item} is not a dict.") if "value" not in item: raise ListFunctionResponseError(f"{f} response dict must have 'value' key.") for key, value in item.items(): if not isinstance(key, str): raise ListFunctionResponseError(f"{f} response dict key must be a string. {key} is not a string.") if not is_json_serializable(value): raise ListFunctionResponseError(f"{f} response dict value {value} is not json serializable.") if not isinstance(value, (str, int, float, list, Dict)): raise ListFunctionResponseError( f"{f} response dict value must be a string, int, float, list or dict. {value} is not supported." ) def validate_tool_func_result(func_call_scenario: str, result): if func_call_scenario == ToolFuncCallScenario.REVERSE_GENERATED_BY: if not isinstance(result, Dict): raise RetrieveToolFuncResultValidationError( f"ToolFuncCallScenario {func_call_scenario} response must be a dict. " f"{result} is not a dict." ) elif func_call_scenario == ToolFuncCallScenario.DYNAMIC_LIST: validate_dynamic_list_func_response_type(result, f"ToolFuncCallScenario {func_call_scenario}") def append_workspace_triple_to_func_input_params( func_sig_params: Dict, func_input_params_dict: Dict, ws_triple_dict: Dict[str, str] ): """Append workspace triple to func input params. :param func_sig_params: function signature parameters, full params. :param func_input_params_dict: user input param key-values for dynamic list function. 
:param ws_triple_dict: workspace triple dict, including subscription_id, resource_group_name, workspace_name. :return: combined func input params. """ # append workspace triple to func input params if any below condition are met: # 1. func signature has kwargs param. # 2. func signature has param named 'subscription_id','resource_group_name','workspace_name'. ws_triple_dict = ws_triple_dict if ws_triple_dict is not None else {} func_input_params_dict = func_input_params_dict if func_input_params_dict is not None else {} has_kwargs_param = any([param.kind == inspect.Parameter.VAR_KEYWORD for _, param in func_sig_params.items()]) if has_kwargs_param is False: # keep only params that are in func signature. Or run into error when calling func. avail_ws_info_dict = {k: v for k, v in ws_triple_dict.items() if k in set(func_sig_params.keys())} else: avail_ws_info_dict = ws_triple_dict # if ws triple key is in func input params, it means user has provided value for it, # do not expect implicit override. combined_func_input_params = dict(avail_ws_info_dict, **func_input_params_dict) return combined_func_input_params def load_function_from_function_path(func_path: str): """Load a function from a function path. The function path should be in the format of "module_name.function_name". """ try: module_name, func_name = func_path.rsplit(".", 1) module = importlib.import_module(module_name) f = getattr(module, func_name) if callable(f): return f else: raise FunctionPathValidationError(f"'{f}' is not callable.") except Exception as e: raise FunctionPathValidationError( f"Failed to parse function from function path: '{func_path}'. Expected format: format 'my_module.my_func'. " f"Detailed error: {e}" ) # Handling backward compatibility and generating a mapping between the previous and new tool IDs. def _find_deprecated_tools(package_tools) -> Dict[str, str]: _deprecated_tools = {} for tool_id, tool in package_tools.items(): # a list of old tool IDs that are mapped to the current tool ID. if tool and _DEPRECATED_TOOLS in tool: for old_tool_id in tool[_DEPRECATED_TOOLS]: # throw error to prompt user for manual resolution of this conflict, ensuring secure operation. if old_tool_id in _deprecated_tools: raise DuplicateToolMappingError( message_format=( "The tools '{first_tool_id}', '{second_tool_id}' are both linked to the deprecated " "tool ID '{deprecated_tool_id}'. To ensure secure operation, please either " "remove or adjust one of these tools in your environment and fix this conflict." ), first_tool_id=_deprecated_tools[old_tool_id], second_tool_id=tool_id, deprecated_tool_id=old_tool_id, target=ErrorTarget.TOOL, ) _deprecated_tools[old_tool_id] = tool_id return _deprecated_tools def _get_function_path(function): # Validate function exist if isinstance(function, str): module_name, func_name = function.rsplit(".", 1) module = importlib.import_module(module_name) func = getattr(module, func_name) func_path = function elif isinstance(function, Callable): func = function func_path = f"{function.__module__}.{function.__name__}" else: raise UserErrorException("Function has invalid type, please provide callable or function name for function.") return func, func_path class RetrieveToolFuncResultError(UserErrorException): """Base exception raised for retreive tool func result errors.""" def __init__(self, message): msg = ( f"Unable to retreive tool func result due to '{message}'. \nPlease contact the tool author/support team " f"for troubleshooting assistance." 
) super().__init__(msg, target=ErrorTarget.FUNCTION_PATH) class RetrieveToolFuncResultValidationError(RetrieveToolFuncResultError): pass class DynamicListError(UserErrorException): """Base exception raised for dynamic list errors.""" def __init__(self, message): msg = ( f"Unable to display list of items due to '{message}'. \nPlease contact the tool author/support team " f"for troubleshooting assistance." ) super().__init__(msg, target=ErrorTarget.FUNCTION_PATH) class ListFunctionResponseError(DynamicListError): pass class FunctionPathValidationError(DynamicListError): pass
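A hedged sketch of turning a plain function into a tool definition and inspecting prompt inputs with the helpers above; the `search` function is invented, and `ToolType.PYTHON` is assumed to be available from `promptflow.contracts.tool`:

```python
from promptflow._utils.tool_utils import function_to_tool_definition, get_inputs_for_prompt_template
from promptflow.contracts.tool import ToolType


def search(query: str, top_k: int = 3) -> str:
    """Search a knowledge base and return the best matching passages."""
    return f"results for {query} (top {top_k})"


tool = function_to_tool_definition(search, type=ToolType.PYTHON)
print(tool.name, list(tool.inputs))  # -> search ['query', 'top_k']

# Jinja template inputs are discovered in order of first appearance; image inputs get ValueType.IMAGE.
print(list(get_inputs_for_prompt_template("Answer {{question}} using {{context}}")))
# -> ['question', 'context']
```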
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/exception_utils.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import json import os from datetime import datetime from enum import Enum from traceback import TracebackException, format_tb from types import TracebackType, FrameType from promptflow.exceptions import PromptflowException, SystemErrorException, UserErrorException, ValidationException ADDITIONAL_INFO_USER_EXECUTION_ERROR = "ToolExecutionErrorDetails" ADDITIONAL_INFO_USER_CODE_STACKTRACE = "UserCodeStackTrace" CAUSE_MESSAGE = "\nThe above exception was the direct cause of the following exception:\n\n" CONTEXT_MESSAGE = "\nDuring handling of the above exception, another exception occurred:\n\n" TRACEBACK_MESSAGE = "Traceback (most recent call last):\n" class RootErrorCode: USER_ERROR = "UserError" SYSTEM_ERROR = "SystemError" class ResponseCode(str, Enum): SUCCESS = "200" ACCEPTED = "202" REDIRECTION = "300" CLIENT_ERROR = "400" SERVICE_ERROR = "500" UNKNOWN = "0" class ErrorResponse: """A class that represents the response body when an error occurs. It follows the following specification: https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses """ def __init__(self, error_dict): self._error_dict = error_dict @staticmethod def from_error_dict(error_dict): """Create an ErrorResponse from an error dict. The error dict which usually is generated by ExceptionPresenter.create(exception).to_dict() """ return ErrorResponse(error_dict) @staticmethod def from_exception(ex: Exception, *, include_debug_info=False): presenter = ExceptionPresenter.create(ex) error_dict = presenter.to_dict(include_debug_info=include_debug_info) return ErrorResponse(error_dict) @property def message(self): return self._error_dict.get("message", "") @property def response_code(self): """Given the error code, return the corresponding http response code.""" root_error_code = self._error_dict.get("code") return ResponseCode.CLIENT_ERROR if root_error_code == RootErrorCode.USER_ERROR else ResponseCode.SERVICE_ERROR @property def additional_info(self): """Return the additional info of the error. The additional info is defined in the error response. It is stored as a list of dict, each of which contains a "type" and "info" field. We change the list of dict to a dict of dict for easier access. """ result = {} list_of_dict = self._error_dict.get("additionalInfo") if not list_of_dict or not isinstance(list_of_dict, list): return result for item in list_of_dict: # We just ignore the item if it is not a dict or does not contain the required fields. 
if not isinstance(item, dict): continue name = item.get("type") info = item.get("info") if not name or not info: continue result[name] = info return result def get_additional_info(self, name): """Get the additional info by name.""" return self.additional_info.get(name) def get_user_execution_error_info(self): """Get user tool execution error info from additional info.""" user_execution_error_info = self.get_additional_info(ADDITIONAL_INFO_USER_EXECUTION_ERROR) if not user_execution_error_info or not isinstance(user_execution_error_info, dict): return {} return user_execution_error_info def to_dict(self): from promptflow._core.operation_context import OperationContext return { "error": self._error_dict, "correlation": None, # TODO: to be implemented "environment": None, # TODO: to be implemented "location": None, # TODO: to be implemented "componentName": OperationContext.get_instance().get_user_agent(), "time": datetime.utcnow().isoformat(), } def to_simplified_dict(self): return { "error": { "code": self._error_dict.get("code"), "message": self._error_dict.get("message"), } } @property def error_codes(self): error = self._error_dict error_codes = [] while error is not None: code = error.get("code") if code is not None: error_codes.append(code) error = error.get("innerError") else: break return error_codes @property def error_code_hierarchy(self): """Get the code hierarchy from error dict.""" return "/".join(self.error_codes) @property def innermost_error_code(self): error_codes = self.error_codes if error_codes: return error_codes[-1] return None class ExceptionPresenter: """A class that can extract information from the exception instance. It is designed to work for both PromptflowException and other exceptions. """ def __init__(self, ex: Exception): self._ex = ex @staticmethod def create(ex: Exception): if isinstance(ex, PromptflowException): return PromptflowExceptionPresenter(ex) return ExceptionPresenter(ex) @property def formatted_traceback(self): te = TracebackException.from_exception(self._ex) return "".join(te.format()) @property def debug_info(self): return self.build_debug_info(self._ex) def build_debug_info(self, ex: Exception): inner_exception: dict = None stack_trace = TRACEBACK_MESSAGE + "".join(format_tb(ex.__traceback__)) if ex.__cause__ is not None: inner_exception = self.build_debug_info(ex.__cause__) stack_trace = CAUSE_MESSAGE + stack_trace elif ex.__context__ is not None and not ex.__suppress_context__: inner_exception = self.build_debug_info(ex.__context__) stack_trace = CONTEXT_MESSAGE + stack_trace return { "type": ex.__class__.__qualname__, "message": str(ex), "stackTrace": stack_trace, "innerException": inner_exception, } @property def error_codes(self): """The hierarchy of the error codes. We follow the "Microsoft REST API Guidelines" to define error codes in a hierarchy style. See the below link for details: https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses This method returns the error codes in a list. It will be converted into a nested json format by error_code_recursed. """ return [infer_error_code_from_class(SystemErrorException), self._ex.__class__.__name__] @property def error_code_recursed(self): """Returns a dict of the error codes for this exception. It is populated in a recursive manner, using the source from `error_codes` property. i.e. 
For PromptflowException, such as ToolExcutionError which inherits from UserErrorException, The result would be: { "code": "UserError", "innerError": { "code": "ToolExecutionError", "innerError": None, }, } For other exception types, such as ValueError, the result would be: { "code": "SystemError", "innerError": { "code": "ValueError", "innerError": None, }, } """ current_error = None reversed_error_codes = reversed(self.error_codes) if self.error_codes else [] for code in reversed_error_codes: current_error = { "code": code, "innerError": current_error, } return current_error def to_dict(self, *, include_debug_info=False): """Return a dict representation of the exception. This dict specification corresponds to the specification of the Microsoft API Guidelines: https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses Note that this dict represents the "error" field in the response body of the API. The whole error response is then populated in another place outside of this class. """ if isinstance(self._ex, JsonSerializedPromptflowException): return self._ex.to_dict(include_debug_info=include_debug_info) # Otherwise, return general dict representation of the exception. result = {"message": str(self._ex), "messageFormat": "", "messageParameters": {}} result.update(self.error_code_recursed) if include_debug_info: result["debugInfo"] = self.debug_info return result class PromptflowExceptionPresenter(ExceptionPresenter): @property def error_codes(self): """The hierarchy of the error codes. We follow the "Microsoft REST API Guidelines" to define error codes in a hierarchy style. See the below link for details: https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses For subclass of PromptflowException, use the ex.error_codes directly. For PromptflowException (not a subclass), the ex.error_code is None. The result should be: ["SystemError", {inner_exception type name if exist}] """ if self._ex.error_codes: return self._ex.error_codes # For PromptflowException (not a subclass), the ex.error_code is None. # Handle this case specifically. error_codes = [infer_error_code_from_class(SystemErrorException)] if self._ex.inner_exception: error_codes.append(infer_error_code_from_class(self._ex.inner_exception.__class__)) return error_codes def to_dict(self, *, include_debug_info=False): result = { "message": self._ex.message, "messageFormat": self._ex.message_format, "messageParameters": self._ex.serializable_message_parameters, "referenceCode": self._ex.reference_code, } result.update(self.error_code_recursed) if self._ex.additional_info: result["additionalInfo"] = [{"type": k, "info": v} for k, v in self._ex.additional_info.items()] if include_debug_info: result["debugInfo"] = self.debug_info return result class JsonSerializedPromptflowException(Exception): """Json serialized PromptflowException. This exception only has one argument message to avoid the argument missing error when load/dump with pickle in multiprocessing. Ref: https://bugs.python.org/issue32696 :param message: A Json serialized message describing the error. :type message: str """ def __init__(self, message): self.message = message super().__init__(self.message) def __str__(self): return self.message def to_dict(self, *, include_debug_info=False): # Return a dict representation of the inner exception. error_dict = json.loads(self.message) # The original serialized error might contain debugInfo. # We pop it out if include_debug_info is set to False. 
if not include_debug_info and "debugInfo" in error_dict: error_dict.pop("debugInfo") return error_dict def get_tb_next(tb: TracebackType, next_cnt: int): """Return the nth tb_next of input tb. If the tb does not have n tb_next, return the last tb which has a value. n = next_cnt """ while tb.tb_next and next_cnt > 0: tb = tb.tb_next next_cnt -= 1 return tb def last_frame_info(ex: Exception): """Return the line number where the error occurred.""" if ex: tb = TracebackException.from_exception(ex) last_frame = tb.stack[-1] if tb.stack else None if last_frame: return { "filename": last_frame.filename, "lineno": last_frame.lineno, "name": last_frame.name, } return {} def infer_error_code_from_class(cls): # Python has a built-in SystemError if cls == SystemErrorException: return RootErrorCode.SYSTEM_ERROR if cls == UserErrorException: return RootErrorCode.USER_ERROR if cls == ValidationException: return "ValidationError" return cls.__name__ def is_pf_core_frame(frame: FrameType): """Check if the frame is from promptflow core code.""" from promptflow import _core folder_of_core = os.path.dirname(_core.__file__) return folder_of_core in frame.f_code.co_filename def remove_suffix(text: str, suffix: str = None): """ Given a string, removes specified suffix, if it has. >>> remove_suffix('hello world', 'world') 'hello ' >>> remove_suffix('hello world', 'hello ') 'hello world' >>> remove_suffix('NoColumnFoundError', 'Error') 'NoColumnFound' :param text: string from which prefix will be removed. :param suffix: suffix to be removed. :return: string removed suffix. """ if not text or not suffix: return text if not text.endswith(suffix): return text return text[:-len(suffix)]
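A short sketch of turning an arbitrary exception into the error-response shape described above:

```python
from promptflow._utils.exception_utils import ErrorResponse, ExceptionPresenter

try:
    raise ValueError("flow input 'question' is missing")
except ValueError as e:
    error_dict = ExceptionPresenter.create(e).to_dict(include_debug_info=False)
    response = ErrorResponse.from_error_dict(error_dict)

print(response.error_code_hierarchy)   # -> SystemError/ValueError
print(response.innermost_error_code)   # -> ValueError
print(response.response_code.value)    # -> "500", since the root code is not UserError
```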
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/credential_scrubber.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import re class CredentialScrubber: """Scrub sensitive information in string.""" PLACE_HOLDER = "**data_scrubbed**" LENGTH_THRESHOLD = 2 def __init__(self): self.default_regex_set = set( [ r"(?<=sig=)[^\s;&]+", # Replace signature. r"(?<=key=)[^\s;&]+", # Replace key. ] ) self.default_str_set = set() self.custom_regex_set = set() self.custom_str_set = set() def scrub(self, input: str): """Replace sensitive information in input string with PLACE_HOLDER. For example, for input string: "print accountkey=accountKey", the output will be: "print accountkey=**data_scrubbed**" """ output = input regex_set = self.default_regex_set.union(self.custom_regex_set) for regex in regex_set: output = re.sub(regex, self.PLACE_HOLDER, output, flags=re.IGNORECASE) str_set = self.default_str_set.union(self.custom_str_set) for s in str_set: output = output.replace(s, self.PLACE_HOLDER) return output def add_regex(self, pattern: str): # policy: http://policheck.azurewebsites.net/Pages/TermInfo.aspx?LCID=9&TermID=79458 """Add regex pattern to checklist.""" self.custom_regex_set.add(pattern) def add_str(self, s: str): """Add string to checklist. Only scrub string with length > LENGTH_THRESHOLD. """ if s is None: return if len(s) <= self.LENGTH_THRESHOLD: return self.custom_str_set.add(s) def clear(self): """Clear custom regex and string set.""" self.custom_regex_set = set() self.custom_str_set = set()
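A quick sketch of the scrubber above; the token and the extra regex are examples:

```python
from promptflow._utils.credential_scrubber import CredentialScrubber

scrubber = CredentialScrubber()
scrubber.add_str("super-secret-token")          # literal strings longer than 2 characters are masked
scrubber.add_regex(r"(?<=password=)[^\s;&]+")   # custom patterns are applied case-insensitively

print(scrubber.scrub("connect?key=abc123&password=hunter2 with super-secret-token"))
# -> connect?key=**data_scrubbed**&password=**data_scrubbed** with **data_scrubbed**
```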
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/openai_injector.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import asyncio import functools import importlib import inspect import logging import os from datetime import datetime from importlib.metadata import version import openai from promptflow._core.operation_context import OperationContext from promptflow.contracts.trace import Trace, TraceType from .tracer import Tracer USER_AGENT_HEADER = "x-ms-useragent" PROMPTFLOW_PREFIX = "ms-azure-ai-promptflow-" IS_LEGACY_OPENAI = version("openai").startswith("0.") def inject_function_async(args_to_ignore=None, trace_type=TraceType.LLM): args_to_ignore = args_to_ignore or [] args_to_ignore = set(args_to_ignore) def wrapper(f): sig = inspect.signature(f).parameters @functools.wraps(f) async def wrapped_method(*args, **kwargs): if not Tracer.active(): return await f(*args, **kwargs) all_kwargs = {**{k: v for k, v in zip(sig.keys(), args)}, **kwargs} for key in args_to_ignore: all_kwargs.pop(key, None) name = f.__qualname__ if not f.__module__ else f.__module__ + "." + f.__qualname__ trace = Trace( name=name, type=trace_type, inputs=all_kwargs, start_time=datetime.utcnow().timestamp(), ) Tracer.push(trace) try: result = await f(*args, **kwargs) except Exception as ex: Tracer.pop(error=ex) raise else: result = Tracer.pop(result) return result return wrapped_method return wrapper def inject_function_sync(args_to_ignore=None, trace_type=TraceType.LLM): args_to_ignore = args_to_ignore or [] args_to_ignore = set(args_to_ignore) def wrapper(f): sig = inspect.signature(f).parameters @functools.wraps(f) def wrapped_method(*args, **kwargs): if not Tracer.active(): return f(*args, **kwargs) all_kwargs = {**{k: v for k, v in zip(sig.keys(), args)}, **kwargs} for key in args_to_ignore: all_kwargs.pop(key, None) name = f.__qualname__ if not f.__module__ else f.__module__ + "." + f.__qualname__ trace = Trace( name=name, type=trace_type, inputs=all_kwargs, start_time=datetime.utcnow().timestamp(), ) Tracer.push(trace) try: result = f(*args, **kwargs) except Exception as ex: Tracer.pop(error=ex) raise else: result = Tracer.pop(result) return result return wrapped_method return wrapper def get_aoai_telemetry_headers() -> dict: """Get the http headers for AOAI request. The header, whose name starts with "ms-azure-ai-" or "x-ms-", is used to track the request in AOAI. The value in this dict will be recorded as telemetry, so please do not put any sensitive information in it. Returns: A dictionary of http headers. """ # get promptflow info from operation context operation_context = OperationContext.get_instance() context_info = operation_context.get_context_dict() promptflow_info = {k.replace("_", "-"): v for k, v in context_info.items()} # init headers headers = {USER_AGENT_HEADER: operation_context.get_user_agent()} # update header with promptflow info headers.update({f"{PROMPTFLOW_PREFIX}{k}": str(v) if v is not None else "" for k, v in promptflow_info.items()}) return headers def inject_operation_headers(f): def inject_headers(kwargs): # Inject headers from operation context, overwrite injected header with headers from kwargs. 
injected_headers = get_aoai_telemetry_headers() original_headers = kwargs.get("headers" if IS_LEGACY_OPENAI else "extra_headers") if original_headers and isinstance(original_headers, dict): injected_headers.update(original_headers) kwargs["headers" if IS_LEGACY_OPENAI else "extra_headers"] = injected_headers if asyncio.iscoroutinefunction(f): @functools.wraps(f) async def wrapper(*args, **kwargs): inject_headers(kwargs) return await f(*args, **kwargs) else: @functools.wraps(f) def wrapper(*args, **kwargs): inject_headers(kwargs) return f(*args, **kwargs) return wrapper def inject_async(f): wrapper_fun = inject_operation_headers((inject_function_async(["api_key", "headers", "extra_headers"])(f))) wrapper_fun._original = f return wrapper_fun def inject_sync(f): wrapper_fun = inject_operation_headers((inject_function_sync(["api_key", "headers", "extra_headers"])(f))) wrapper_fun._original = f return wrapper_fun def _openai_api_list(): if IS_LEGACY_OPENAI: sync_apis = ( ("openai", "Completion", "create"), ("openai", "ChatCompletion", "create"), ("openai", "Embedding", "create"), ) async_apis = ( ("openai", "Completion", "acreate"), ("openai", "ChatCompletion", "acreate"), ("openai", "Embedding", "acreate"), ) else: sync_apis = ( ("openai.resources.chat", "Completions", "create"), ("openai.resources", "Completions", "create"), ("openai.resources", "Embeddings", "create"), ) async_apis = ( ("openai.resources.chat", "AsyncCompletions", "create"), ("openai.resources", "AsyncCompletions", "create"), ("openai.resources", "AsyncEmbeddings", "create"), ) yield sync_apis, inject_sync yield async_apis, inject_async def _generate_api_and_injector(apis): for apis, injector in apis: for module_name, class_name, method_name in apis: try: module = importlib.import_module(module_name) api = getattr(module, class_name) if hasattr(api, method_name): yield api, method_name, injector except AttributeError as e: # Log the attribute exception with the missing class information logging.warning( f"AttributeError: The module '{module_name}' does not have the class '{class_name}'. {str(e)}" ) except Exception as e: # Log other exceptions as a warning, as we're not sure what they might be logging.warning(f"An unexpected error occurred: {str(e)}") def available_openai_apis_and_injectors(): """ Generates a sequence of tuples containing OpenAI API classes, method names, and corresponding injector functions based on whether the legacy OpenAI interface is used. This function handles the discrepancy reported in https://github.com/openai/openai-python/issues/996, where async interfaces were not recognized as coroutines. It ensures that decorators are applied correctly to both synchronous and asynchronous methods. Yields: Tuples of (api_class, method_name, injector_function) """ yield from _generate_api_and_injector(_openai_api_list()) def inject_openai_api(): """This function: 1. Modifies the create methods of the OpenAI API classes to inject logic before calling the original methods. It stores the original methods as _original attributes of the create methods. 2. Updates the openai api configs from environment variables. """ for api, method, injector in available_openai_apis_and_injectors(): # Check if the create method of the openai_api class has already been modified if not hasattr(getattr(api, method), "_original"): setattr(api, method, injector(getattr(api, method))) if IS_LEGACY_OPENAI: # For the openai versions lower than 1.0.0, it reads api configs from environment variables only at # import time. 
So we need to update the openai api configs from environment variables here. # Please refer to this issue: https://github.com/openai/openai-python/issues/557. # The issue has been fixed in openai>=1.0.0. openai.api_key = os.environ.get("OPENAI_API_KEY", openai.api_key) openai.api_key_path = os.environ.get("OPENAI_API_KEY_PATH", openai.api_key_path) openai.organization = os.environ.get("OPENAI_ORGANIZATION", openai.organization) openai.api_base = os.environ.get("OPENAI_API_BASE", openai.api_base) openai.api_type = os.environ.get("OPENAI_API_TYPE", openai.api_type) openai.api_version = os.environ.get("OPENAI_API_VERSION", openai.api_version) def recover_openai_api(): """This function restores the original create methods of the OpenAI API classes by assigning them back from the _original attributes of the modified methods. """ for api, method, _ in available_openai_apis_and_injectors(): if hasattr(getattr(api, method), "_original"): setattr(api, method, getattr(getattr(api, method), "_original"))
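A minimal sketch of toggling the injection described above around a block of OpenAI calls:

```python
from promptflow._core.openai_injector import inject_openai_api, recover_openai_api

inject_openai_api()  # patch the OpenAI create methods so calls carry promptflow headers and are traced
try:
    pass  # run the flow / make OpenAI calls here
finally:
    recover_openai_api()  # restore the original, unpatched methods
```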
0
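The injection above works by replacing each `create` method with a wrapper and stashing the original callable on a `_original` attribute so it can later be restored. Below is a minimal, self-contained sketch of that patch/restore pattern under stated assumptions: `FakeCompletions`, the header names, and the values are illustrative stand-ins, not the real OpenAI client or promptflow's actual telemetry headers.

```python
import functools


class FakeCompletions:
    """Stand-in for an API class; illustrative only, not the real OpenAI client."""

    def create(self, **kwargs):
        return {"echo": kwargs}


def inject_headers(kwargs):
    # Merge an extra header without clobbering anything the caller passed.
    headers = kwargs.setdefault("extra_headers", {})
    headers.setdefault("x-example-useragent", "promptflow-example")


def patch_create(api_cls):
    # Skip if already patched, mirroring the `_original` guard above.
    if hasattr(api_cls.create, "_original"):
        return
    original = api_cls.create

    @functools.wraps(original)
    def wrapper(self, **kwargs):
        inject_headers(kwargs)
        return original(self, **kwargs)

    wrapper._original = original
    api_cls.create = wrapper


def unpatch_create(api_cls):
    # Restore the original method, mirroring recover_openai_api above.
    if hasattr(api_cls.create, "_original"):
        api_cls.create = api_cls.create._original


patch_create(FakeCompletions)
print(FakeCompletions().create(prompt="hi"))  # extra_headers were injected
unpatch_create(FakeCompletions)
print(FakeCompletions().create(prompt="hi"))  # back to the plain call
```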
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/metric_logger.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import inspect from typing import Callable class MetricLoggerManager: _instance = None def __init__(self): self._metric_loggers = [] @staticmethod def get_instance() -> "MetricLoggerManager": if MetricLoggerManager._instance is None: MetricLoggerManager._instance = MetricLoggerManager() return MetricLoggerManager._instance def log_metric(self, key, value, variant_id=None): for logger in self._metric_loggers: if len(inspect.signature(logger).parameters) == 2: logger(key, value) # If the logger only accepts two parameters, we don't pass variant_id else: logger(key, value, variant_id) def add_metric_logger(self, logger_func: Callable): existing_logger = next((logger for logger in self._metric_loggers if logger is logger_func), None) if existing_logger: return if not callable(logger_func): return sign = inspect.signature(logger_func) # We accept two kinds of metric loggers: # def log_metric(k, v) # def log_metric(k, v, variant_id) if len(sign.parameters) not in [2, 3]: return self._metric_loggers.append(logger_func) def remove_metric_logger(self, logger_func: Callable): self._metric_loggers.remove(logger_func) def log_metric(key, value, variant_id=None): """Log a metric for current promptflow run. :param key: Metric name. :type key: str :param value: Metric value. :type value: float :param variant_id: Variant id for the metric. :type variant_id: str """ MetricLoggerManager.get_instance().log_metric(key, value, variant_id) def add_metric_logger(logger_func: Callable): MetricLoggerManager.get_instance().add_metric_logger(logger_func) def remove_metric_logger(logger_func: Callable): MetricLoggerManager.get_instance().remove_metric_logger(logger_func)
0
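A short usage sketch of the registry defined above: a callback with a `(key, value)` signature is registered, so `log_metric` invokes it without the variant id. The metric name and value are made up for illustration.

```python
from promptflow._core.metric_logger import add_metric_logger, log_metric, remove_metric_logger

collected = []


# A two-parameter logger: the manager detects the arity and omits variant_id.
def capture(key, value):
    collected.append((key, value))


add_metric_logger(capture)
log_metric("accuracy", 0.92, variant_id="variant_0")
remove_metric_logger(capture)

print(collected)  # [("accuracy", 0.92)]
```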
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/run_tracker.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import asyncio import json from contextvars import ContextVar from datetime import datetime, timezone from types import GeneratorType from typing import Any, Dict, List, Mapping, Optional, Union from promptflow._core._errors import FlowOutputUnserializable, RunRecordNotFound, ToolCanceledError from promptflow._core.log_manager import NodeLogManager from promptflow._core.thread_local_singleton import ThreadLocalSingleton from promptflow._utils.dataclass_serializer import serialize from promptflow._utils.exception_utils import ExceptionPresenter from promptflow._utils.logger_utils import flow_logger from promptflow._utils.multimedia_utils import default_json_encoder from promptflow._utils.openai_metrics_calculator import OpenAIMetricsCalculator from promptflow.contracts.run_info import FlowRunInfo, RunInfo, Status from promptflow.contracts.run_mode import RunMode from promptflow.contracts.tool import ConnectionType from promptflow.exceptions import ErrorTarget from promptflow.storage import AbstractRunStorage from promptflow.storage._run_storage import DummyRunStorage class RunTracker(ThreadLocalSingleton): RUN_CONTEXT_NAME = "CurrentRun" CONTEXT_VAR_NAME = "RunTracker" context_var = ContextVar(CONTEXT_VAR_NAME, default=None) @staticmethod def init_dummy() -> "RunTracker": return RunTracker(DummyRunStorage()) def __init__(self, run_storage: AbstractRunStorage, run_mode: RunMode = RunMode.Test, node_log_manager=None): self._node_runs: Dict[str, RunInfo] = {} self._flow_runs: Dict[str, FlowRunInfo] = {} self._current_run_id = "" self._run_context = ContextVar(self.RUN_CONTEXT_NAME, default="") self._storage = run_storage self._debug = True # TODO: Make this configurable self.node_log_manager = node_log_manager or NodeLogManager() self._has_failed_root_run = False self._run_mode = run_mode self._allow_generator_types = False @property def allow_generator_types(self): return self._allow_generator_types @allow_generator_types.setter def allow_generator_types(self, value: bool): self._allow_generator_types = value @property def node_run_list(self): # Add list() to make node_run_list a new list object, # therefore avoid iterating over a dictionary, which might be updated by another thread. return list(self._node_runs.values()) @property def flow_run_list(self): # Add list() to make flow_run_list a new list object, # therefore avoid iterating over a dictionary, which might be updated by another thread. 
return list(self._flow_runs.values()) def set_current_run_in_context(self, run_id: str): self._run_context.set(run_id) def get_current_run_in_context(self) -> str: return self._run_context.get() def start_flow_run( self, flow_id, root_run_id, run_id, parent_run_id="", inputs=None, index=None, variant_id="", ) -> FlowRunInfo: """Create a flow run and save to run storage on demand.""" run_info = FlowRunInfo( run_id=run_id, status=Status.Running, error=None, inputs=inputs, output=None, metrics=None, request=None, parent_run_id=parent_run_id, root_run_id=root_run_id, source_run_id=None, flow_id=flow_id, start_time=datetime.utcnow(), end_time=None, index=index, variant_id=variant_id, ) self.persist_flow_run(run_info) self._flow_runs[run_id] = run_info self._current_run_id = run_id return run_info def start_node_run( self, node, flow_run_id, parent_run_id, run_id, index, ): run_info = RunInfo( node=node, run_id=run_id, flow_run_id=flow_run_id, status=Status.Running, inputs=None, output=None, metrics=None, error=None, parent_run_id=parent_run_id, start_time=datetime.utcnow(), end_time=None, ) self._node_runs[run_id] = run_info self._current_run_id = run_id self.set_current_run_in_context(run_id) self.node_log_manager.set_node_context(run_id, node, index) return run_info def bypass_node_run( self, node, flow_run_id, parent_run_id, run_id, index, variant_id, ): run_info = RunInfo( node=node, run_id=run_id, flow_run_id=flow_run_id, parent_run_id=parent_run_id, status=Status.Bypassed, inputs=None, output=None, metrics=None, error=None, start_time=datetime.utcnow(), end_time=datetime.utcnow(), result=None, index=index, variant_id=variant_id, api_calls=[], ) self._node_runs[run_id] = run_info return run_info def _flow_run_postprocess(self, run_info: FlowRunInfo, output, ex: Optional[Exception]): if output: try: self._assert_flow_output_serializable(output) except Exception as e: output, ex = None, e self._common_postprocess(run_info, output, ex) def _update_flow_run_info_with_node_runs(self, run_info: FlowRunInfo): run_id = run_info.run_id child_run_infos = self.collect_child_node_runs(run_id) run_info.system_metrics = run_info.system_metrics or {} run_info.system_metrics.update(self.collect_metrics(child_run_infos, self.OPENAI_AGGREGATE_METRICS)) # TODO: Refactor Tracer to support flow level tracing, # then we can remove the hard-coded root level api_calls here. # It has to be a list for UI backward compatibility. # TODO: Add input, output, error to top level. Adding them would require # the same technique of handingling image and generator in Tracer, # which introduces duplicated logic. We should do it in the refactoring. 
start_timestamp = run_info.start_time.astimezone(timezone.utc).timestamp() if run_info.start_time else None end_timestamp = run_info.end_time.astimezone(timezone.utc).timestamp() if run_info.end_time else None run_info.api_calls = [ { "name": "flow", "node_name": "flow", "type": "Flow", "start_time": start_timestamp, "end_time": end_timestamp, "children": self._collect_traces_from_nodes(run_id), "system_metrics": run_info.system_metrics, } ] def _node_run_postprocess(self, run_info: RunInfo, output, ex: Optional[Exception]): run_id = run_info.run_id self.set_openai_metrics(run_id) logs = self.node_log_manager.get_logs(run_id) run_info.logs = logs self.node_log_manager.clear_node_context(run_id) if run_info.inputs: run_info.inputs = self._ensure_inputs_is_json_serializable(run_info.inputs, run_info.node) if output is not None: msg = f"Output of {run_info.node} is not json serializable, use str to store it." output = self._ensure_serializable_value(output, msg) self._common_postprocess(run_info, output, ex) def _common_postprocess(self, run_info, output, ex): if output is not None: # Duplicated fields for backward compatibility. run_info.result = output run_info.output = output if ex is not None: self._enrich_run_info_with_exception(run_info=run_info, ex=ex) else: run_info.status = Status.Completed run_info.end_time = datetime.utcnow() if not isinstance(run_info.start_time, datetime): flow_logger.warning( f"Run start time {run_info.start_time} for {run_info.run_id} is not a datetime object, " f"got {run_info.start_time}, type={type(run_info.start_time)}." ) else: duration = (run_info.end_time - run_info.start_time).total_seconds() run_info.system_metrics = run_info.system_metrics or {} run_info.system_metrics["duration"] = duration def cancel_node_runs(self, msg: str, flow_run_id): node_runs = self.collect_node_runs(flow_run_id) for node_run_info in node_runs: if node_run_info.status != Status.Running: continue msg = msg.rstrip(".") # Avoid duplicated "." in the end of the message. err = ToolCanceledError( message_format="Tool execution is canceled because of the error: {msg}.", msg=msg, target=ErrorTarget.EXECUTOR, ) self.end_run(node_run_info.run_id, ex=err) node_run_info.status = Status.Canceled self.persist_node_run(node_run_info) def end_run( self, run_id: str, *, result: Optional[dict] = None, ex: Optional[Exception] = None, traces: Optional[List] = None, ): run_info = self._flow_runs.get(run_id) or self._node_runs.get(run_id) if run_info is None: raise RunRecordNotFound( message_format=( "Run record with ID '{run_id}' was not tracked in promptflow execution. " "Please contact support for further assistance." ), target=ErrorTarget.RUN_TRACKER, run_id=run_id, ) # If the run is already canceled, do nothing. 
if run_info.status == Status.Canceled: return run_info if isinstance(run_info, FlowRunInfo): self._flow_run_postprocess(run_info, result, ex) if traces: run_info.api_calls = traces elif isinstance(run_info, RunInfo): run_info.api_calls = traces self._node_run_postprocess(run_info, result, ex) return run_info def _ensure_serializable_value(self, val, warning_msg: Optional[str] = None): if ConnectionType.is_connection_value(val): return ConnectionType.serialize_conn(val) if self.allow_generator_types and isinstance(val, GeneratorType): return str(val) try: json.dumps(val, default=default_json_encoder) return val except Exception: if not warning_msg: raise flow_logger.warning(warning_msg) return repr(val) def _ensure_inputs_is_json_serializable(self, inputs: dict, node_name: str) -> dict: return { k: self._ensure_serializable_value( v, f"Input '{k}' of {node_name} is not json serializable, use str to store it." ) for k, v in inputs.items() } def _assert_flow_output_serializable(self, output: Any) -> Any: def _wrap_serializable_error(value): try: return self._ensure_serializable_value(value) except Exception as e: # If a specific key-value pair is not serializable, raise an exception with the key. error_type_and_message = f"({e.__class__.__name__}) {e}" message_format = ( "The output '{output_name}' for flow is incorrect. The output value is not JSON serializable. " "JSON dump failed: {error_type_and_message}. Please verify your flow output and " "make sure the value serializable." ) raise FlowOutputUnserializable( message_format=message_format, target=ErrorTarget.FLOW_EXECUTOR, output_name=k, error_type_and_message=error_type_and_message, ) from e # support primitive outputs in eager mode if not isinstance(output, dict): return _wrap_serializable_error(output) serializable_output = {} for k, v in output.items(): serializable_output[k] = _wrap_serializable_error(v) return serializable_output def _enrich_run_info_with_exception(self, run_info: Union[RunInfo, FlowRunInfo], ex: Exception): """Update exception details into run info.""" # Update status to Cancelled the run terminates because of KeyboardInterruption or CancelledError. if isinstance(ex, KeyboardInterrupt) or isinstance(ex, asyncio.CancelledError): run_info.status = Status.Canceled else: run_info.error = ExceptionPresenter.create(ex).to_dict(include_debug_info=self._debug) run_info.status = Status.Failed def collect_all_run_infos_as_dicts(self) -> Mapping[str, List[Mapping[str, Any]]]: flow_runs = self.flow_run_list node_runs = self.node_run_list return { "flow_runs": [serialize(run) for run in flow_runs], "node_runs": [serialize(run) for run in node_runs], } def collect_node_runs(self, flow_run_id: Optional[str] = None) -> List[RunInfo]: """If flow_run_id is None, return all node runs.""" if flow_run_id: return [run_info for run_info in self.node_run_list if run_info.flow_run_id == flow_run_id] return [run_info for run_info in self.node_run_list] def collect_child_node_runs(self, parent_run_id: str) -> List[RunInfo]: return [run_info for run_info in self.node_run_list if run_info.parent_run_id == parent_run_id] def ensure_run_info(self, run_id: str) -> Union[RunInfo, FlowRunInfo]: run_info = self._node_runs.get(run_id) or self._flow_runs.get(run_id) if run_info is None: raise RunRecordNotFound( message_format=( "Run record with ID '{run_id}' was not tracked in promptflow execution. " "Please contact support for further assistance." 
), target=ErrorTarget.RUN_TRACKER, run_id=run_id, ) return run_info def set_inputs(self, run_id: str, inputs: Mapping[str, Any]): run_info = self.ensure_run_info(run_id) run_info.inputs = inputs def set_openai_metrics(self, run_id: str): # TODO: Provide a common implementation for different internal metrics run_info = self.ensure_run_info(run_id) calls = run_info.api_calls or [] total_metrics = {} calculator = OpenAIMetricsCalculator(flow_logger) for call in calls: metrics = calculator.get_openai_metrics_from_api_call(call) calculator.merge_metrics_dict(total_metrics, metrics) run_info.system_metrics = run_info.system_metrics or {} run_info.system_metrics.update(total_metrics) def _collect_traces_from_nodes(self, run_id): child_run_infos = self.collect_child_node_runs(run_id) traces = [] for node_run_info in child_run_infos: traces.extend(node_run_info.api_calls or []) return traces OPENAI_AGGREGATE_METRICS = ["prompt_tokens", "completion_tokens", "total_tokens"] def collect_metrics(self, run_infos: List[RunInfo], aggregate_metrics: List[str] = []): if not aggregate_metrics: return {} total_metrics = {} for run_info in run_infos: if not run_info.system_metrics: continue for metric in aggregate_metrics: total_metrics[metric] = total_metrics.get(metric, 0) + run_info.system_metrics.get(metric, 0) return total_metrics def get_run(self, run_id): return self._node_runs.get(run_id) or self._flow_runs.get(run_id) def persist_node_run(self, run_info: RunInfo): self._storage.persist_node_run(run_info) def persist_selected_node_runs(self, run_info: FlowRunInfo, node_names: List[str]): """ Persists the node runs for the specified node names. :param run_info: The flow run information. :type run_info: FlowRunInfo :param node_names: The names of the nodes to persist. :type node_names: List[str] :returns: None """ run_id = run_info.run_id selected_node_run_info = ( run_info for run_info in self.collect_child_node_runs(run_id) if run_info.node in node_names ) for node_run_info in selected_node_run_info: self.persist_node_run(node_run_info) def persist_flow_run(self, run_info: FlowRunInfo): self._storage.persist_flow_run(run_info) def get_status_summary(self, run_id: str): node_run_infos = self.collect_node_runs(run_id) status_summary = {} for run_info in node_run_infos: node_name = run_info.node if run_info.index is not None: # Only consider Completed, Bypassed and Failed status, because the UX only support three status. if run_info.status in (Status.Completed, Status.Bypassed, Status.Failed): node_status_key = f"__pf__.nodes.{node_name}.{run_info.status.value.lower()}" status_summary[node_status_key] = status_summary.setdefault(node_status_key, 0) + 1 # For reduce node, the index is None. else: status_summary[f"__pf__.nodes.{node_name}.completed"] = 1 if run_info.status == Status.Completed else 0 # Runtime will start root flow run with run_id == root_run_id, # line flow run will have run id f"{root_run_id}_{line_number}" # We filter out root flow run accordingly. 
line_flow_run_infos = [ flow_run_info for flow_run_info in self.flow_run_list if flow_run_info.root_run_id == run_id and flow_run_info.run_id != run_id ] total_lines = len(line_flow_run_infos) completed_lines = len( [flow_run_info for flow_run_info in line_flow_run_infos if flow_run_info.status == Status.Completed] ) status_summary["__pf__.lines.completed"] = completed_lines status_summary["__pf__.lines.failed"] = total_lines - completed_lines return status_summary def persist_status_summary(self, status_summary: Dict[str, int], run_id: str): self._storage.persist_status_summary(status_summary, run_id)
0
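A minimal sketch of the tracking flow above, assuming the signatures shown in this file and using the dummy storage so nothing is actually persisted; the run ids, inputs, and outputs are illustrative.

```python
from promptflow._core.run_tracker import RunTracker
from promptflow.contracts.run_info import Status

tracker = RunTracker.init_dummy()  # backed by DummyRunStorage

run_info = tracker.start_flow_run(
    flow_id="my_flow",
    root_run_id="root_1",
    run_id="root_1_0",
    parent_run_id="root_1",
    inputs={"question": "hi"},
    index=0,
)

# Ending the run attaches the output, marks it Completed and records duration.
tracker.end_run(run_info.run_id, result={"answer": "hello"})

assert run_info.status == Status.Completed
assert "duration" in run_info.system_metrics
```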
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/log_manager.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import sys from contextvars import ContextVar from datetime import datetime, timezone from io import StringIO, TextIOBase from typing import Dict from promptflow._utils.logger_utils import flow_logger, logger, scrub_credentials class NodeInfo: def __init__(self, run_id: str, node_name: str, line_number: int): self.run_id = run_id self.node_name = node_name self.line_number = line_number def __str__(self) -> str: return f"{self.node_name} in line {self.line_number} (index starts from 0)" class NodeLogManager: """Replace sys.stdout and sys.stderr with NodeLogWriter. This class intercepts and saves logs to stdout/stderr when executing a node. For example: with NodeLogManager() as log_manager: print('test stdout') print('test stderr', file=sys.stderr) log_manager.get_logs() will return: {'stdout': 'test stdout\n', 'stderr': 'test stderr\n'} """ def __init__(self, record_datetime=True): self.stdout_logger = NodeLogWriter(sys.stdout, record_datetime) self.stderr_logger = NodeLogWriter(sys.stderr, record_datetime, is_stderr=True) self.log_handler = None def __enter__(self): """Replace sys.stdout and sys.stderr with NodeLogWriter.""" self._prev_stdout = sys.stdout self._prev_stderr = sys.stderr sys.stdout = self.stdout_logger sys.stderr = self.stderr_logger return self def __exit__(self, *args): """Restore sys.stdout and sys.stderr.""" sys.stdout = self._prev_stdout sys.stderr = self._prev_stderr def set_node_context(self, run_id: str, node_name: str, line_number: int): """Set node context.""" self.stdout_logger.set_node_info(run_id, node_name, line_number) self.stderr_logger.set_node_info(run_id, node_name, line_number) def clear_node_context(self, run_id): """Clear node context.""" self.stdout_logger.clear_node_info(run_id) self.stderr_logger.clear_node_info(run_id) def get_logs(self, run_id) -> Dict[str, str]: return { "stdout": self.stdout_logger.get_log(run_id), "stderr": self.stderr_logger.get_log(run_id), } class NodeLogWriter(TextIOBase): """Record node run logs.""" DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S%z" def __init__(self, prev_stdout, record_datetime=True, is_stderr=False): self.run_id_to_stdout = dict() self._context = ContextVar("run_log_info", default=None) self._prev_out = prev_stdout self._record_datetime = record_datetime self._is_stderr = is_stderr def set_node_info(self, run_id: str, node_name: str, line_number: int = None): """Set node info to a context variable. After set node info, write method will write to stringio associated with this node. """ run_log_info = NodeInfo(run_id, node_name, line_number) self._context.set(run_log_info) self.run_id_to_stdout.update({run_id: StringIO()}) def clear_node_info(self, run_id: str): """Clear context variable associated with run id.""" log_info: NodeInfo = self._context.get() if log_info and log_info.run_id == run_id: self._context.set(None) if run_id in self.run_id_to_stdout: self.run_id_to_stdout.pop(run_id) def get_log(self, run_id: str) -> str: """Get log associated with run id.""" string_io: StringIO = self.run_id_to_stdout.get(run_id) if string_io is None: return None return string_io.getvalue() def write(self, s: str): """Override TextIO's write method and writes input string into a stringio The written string is compliant without any credentials. The string is also recorded to flow/bulk logger. If node info is not set, write to previous stdout. 
""" log_info: NodeInfo = self._context.get() s = scrub_credentials(s) # Remove credential from string. if log_info is None: self._prev_out.write(s) else: self._write_to_flow_log(log_info, s) stdout: StringIO = self.run_id_to_stdout.get(log_info.run_id) if self._record_datetime and s != "\n": # For line breaker, do not add datetime prefix. s = f"[{datetime.now(timezone.utc).strftime(self.DATETIME_FORMAT)}] {s}" stdout.write(s) def flush(self): """Override TextIO's flush method.""" node_info: NodeInfo = self._context.get() if node_info is None: self._prev_out.flush() else: string_io = self.run_id_to_stdout.get(node_info.run_id) if string_io is not None: string_io.flush() def _write_to_flow_log(self, log_info: NodeInfo, s: str): """Save stdout log to flow_logger and stderr log to logger.""" # If user uses "print('log message.')" to log, then # "write" method will be called twice and the second time input is only '\n'. # For this case, should not log '\n' in flow_logger. if s != "\n": if self._is_stderr: flow_log = f"[{str(log_info)}] stderr> " + s.rstrip("\n") # Log stderr in all scenarios so we can diagnose problems. logger.warning(flow_log) else: flow_log = f"[{str(log_info)}] stdout> " + s.rstrip("\n") # Log stdout only in flow mode. flow_logger.info(flow_log)
0
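A small usage sketch of the classes above, with datetime prefixes disabled so the captured text is easy to compare; the run id and node name are arbitrary.

```python
import sys

from promptflow._core.log_manager import NodeLogManager

with NodeLogManager(record_datetime=False) as log_manager:
    # Route subsequent stdout/stderr writes to the buffers for this run id.
    log_manager.set_node_context("run_1", "my_node", line_number=0)
    print("hello from my_node")
    print("something went wrong", file=sys.stderr)
    logs = log_manager.get_logs("run_1")

print(logs["stdout"])  # "hello from my_node\n"
print(logs["stderr"])  # "something went wrong\n"
```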
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/flow_execution_context.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import asyncio import functools import inspect import logging import threading import time import uuid from contextvars import ContextVar from logging import WARNING from typing import Callable from promptflow._core._errors import ToolExecutionError, UnexpectedError from promptflow._core.cache_manager import AbstractCacheManager, CacheInfo, CacheResult from promptflow._core.operation_context import OperationContext from promptflow._utils.logger_utils import flow_logger, logger from promptflow._utils.thread_utils import RepeatLogTimer from promptflow._utils.utils import generate_elapsed_time_messages from promptflow.contracts.flow import Node from promptflow.contracts.run_info import RunInfo from promptflow.exceptions import PromptflowException from .run_tracker import RunTracker from .thread_local_singleton import ThreadLocalSingleton from .tracer import Tracer class FlowExecutionContext(ThreadLocalSingleton): """The context for a flow execution.""" CONTEXT_VAR_NAME = "Flow" context_var = ContextVar(CONTEXT_VAR_NAME, default=None) def __init__( self, name, run_tracker: RunTracker, cache_manager: AbstractCacheManager = None, run_id=None, flow_id=None, line_number=None, variant_id=None, ): self._name = name self._run_tracker = run_tracker self._cache_manager = cache_manager or AbstractCacheManager.init_from_env() self._run_id = run_id or str(uuid.uuid4()) self._flow_id = flow_id or self._run_id self._line_number = line_number self._variant_id = variant_id def copy(self): return FlowExecutionContext( name=self._name, run_tracker=self._run_tracker, cache_manager=self._cache_manager, run_id=self._run_id, flow_id=self._flow_id, line_number=self._line_number, variant_id=self._variant_id, ) def _update_operation_context(self): flow_context_info = {"flow-id": self._flow_id, "root-run-id": self._run_id} OperationContext.get_instance().update(flow_context_info) def cancel_node_runs(self, msg): self._run_tracker.cancel_node_runs(msg, self._run_id) def invoke_tool(self, node: Node, f: Callable, kwargs): run_info = self._prepare_node_run(node, f, kwargs) node_run_id = run_info.run_id traces = [] try: hit_cache = False # Get result from cache. If hit cache, no need to execute f. cache_info: CacheInfo = self._cache_manager.calculate_cache_info(self._flow_id, f, [], kwargs) if node.enable_cache and cache_info: cache_result: CacheResult = self._cache_manager.get_cache_result(cache_info) if cache_result and cache_result.hit_cache: # Assign cached_flow_run_id and cached_run_id. run_info.cached_flow_run_id = cache_result.cached_flow_run_id run_info.cached_run_id = cache_result.cached_run_id result = cache_result.result hit_cache = True if not hit_cache: Tracer.start_tracing(node_run_id, node.name) result = self._invoke_tool_with_timer(node, f, kwargs) traces = Tracer.end_tracing(node_run_id) self._run_tracker.end_run(node_run_id, result=result, traces=traces) # Record result in cache so that future run might reuse its result. if not hit_cache and node.enable_cache: self._persist_cache(cache_info, run_info) flow_logger.info(f"Node {node.name} completes.") return result except Exception as e: logger.exception(f"Node {node.name} in line {self._line_number} failed. 
Exception: {e}.") if not traces: traces = Tracer.end_tracing(node_run_id) self._run_tracker.end_run(node_run_id, ex=e, traces=traces) raise finally: self._run_tracker.persist_node_run(run_info) def _prepare_node_run(self, node: Node, f, kwargs={}): # Ensure this thread has a valid operation context self._update_operation_context() node_run_id = self._generate_node_run_id(node) flow_logger.info(f"Executing node {node.name}. node run id: {node_run_id}") parent_run_id = f"{self._run_id}_{self._line_number}" if self._line_number is not None else self._run_id run_info: RunInfo = self._run_tracker.start_node_run( node=node.name, flow_run_id=self._run_id, parent_run_id=parent_run_id, run_id=node_run_id, index=self._line_number, ) run_info.index = self._line_number run_info.variant_id = self._variant_id self._run_tracker.set_inputs(node_run_id, {key: value for key, value in kwargs.items() if key != "self"}) return run_info async def invoke_tool_async(self, node: Node, f: Callable, kwargs): if not inspect.iscoroutinefunction(f): raise UnexpectedError( message_format="Tool '{function}' in node '{node}' is not a coroutine function.", function=f, node=node.name, ) run_info = self._prepare_node_run(node, f, kwargs=kwargs) node_run_id = run_info.run_id traces = [] try: Tracer.start_tracing(node_run_id, node.name) result = await self._invoke_tool_async_inner(node, f, kwargs) traces = Tracer.end_tracing(node_run_id) self._run_tracker.end_run(node_run_id, result=result, traces=traces) flow_logger.info(f"Node {node.name} completes.") return result # User tool should reraise the CancelledError after its own handling logic, # so that the error can propagate to the scheduler for handling. # Otherwise, the node would end with Completed status. except asyncio.CancelledError as e: logger.info(f"Node {node.name} in line {self._line_number} is canceled.") traces = Tracer.end_tracing(node_run_id) self._run_tracker.end_run(node_run_id, ex=e, traces=traces) raise except Exception as e: logger.exception(f"Node {node.name} in line {self._line_number} failed. Exception: {e}.") traces = Tracer.end_tracing(node_run_id) self._run_tracker.end_run(node_run_id, ex=e, traces=traces) raise finally: self._run_tracker.persist_node_run(run_info) async def _invoke_tool_async_inner(self, node: Node, f: Callable, kwargs): module = f.func.__module__ if isinstance(f, functools.partial) else f.__module__ try: return await f(**kwargs) except PromptflowException as e: # All the exceptions from built-in tools are PromptflowException. # For these cases, raise the exception directly. if module is not None: e.module = module raise e except Exception as e: # Otherwise, we assume the error comes from user's tool. # For these cases, raise ToolExecutionError, which is classified as UserError # and shows stack trace in the error message to make it easy for user to troubleshoot. 
raise ToolExecutionError(node_name=node.name, module=module) from e def _invoke_tool_with_timer(self, node: Node, f: Callable, kwargs): module = f.func.__module__ if isinstance(f, functools.partial) else f.__module__ node_name = node.name try: logging_name = node_name if self._line_number is not None: logging_name = f"{node_name} in line {self._line_number}" interval_seconds = 60 start_time = time.perf_counter() thread_id = threading.current_thread().ident with RepeatLogTimer( interval_seconds=interval_seconds, logger=logger, level=WARNING, log_message_function=generate_elapsed_time_messages, args=(logging_name, start_time, interval_seconds, thread_id), ): return f(**kwargs) except PromptflowException as e: # All the exceptions from built-in tools are PromptflowException. # For these cases, raise the exception directly. if module is not None: e.module = module raise e except Exception as e: # Otherwise, we assume the error comes from user's tool. # For these cases, raise ToolExecutionError, which is classified as UserError # and shows stack trace in the error message to make it easy for user to troubleshoot. raise ToolExecutionError(node_name=node_name, module=module) from e def bypass_node(self, node: Node): """Update the bypassed node run info.""" node_run_id = self._generate_node_run_id(node) flow_logger.info(f"Bypassing node {node.name}. node run id: {node_run_id}") parent_run_id = f"{self._run_id}_{self._line_number}" if self._line_number is not None else self._run_id run_info = self._run_tracker.bypass_node_run( node=node.name, flow_run_id=self._run_id, parent_run_id=parent_run_id, run_id=node_run_id, index=self._line_number, variant_id=self._variant_id, ) self._run_tracker.persist_node_run(run_info) def _persist_cache(self, cache_info: CacheInfo, run_info: RunInfo): """Record result in cache storage if hash_id is valid.""" if cache_info and cache_info.hash_id is not None and len(cache_info.hash_id) > 0: try: self._cache_manager.persist_result(run_info, cache_info, self._flow_id) except Exception as ex: # Not a critical path, swallow the exception. logging.warning(f"Failed to persist cache result. run_id: {run_info.run_id}. Exception: {ex}") def _generate_node_run_id(self, node: Node) -> str: if node.aggregation: # For reduce node, the id should be constructed by the flow run info run id return f"{self._run_id}_{node.name}_reduce" if self._line_number is None: return f"{self._run_id}_{node.name}_{uuid.uuid4()}" return f"{self._run_id}_{node.name}_{self._line_number}"
0
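The core shape of `invoke_tool` above is: start a node run, execute the tool under tracing, end the run with either the result or the exception, and persist the run info in a `finally`. The following is a toy, promptflow-free sketch of that control flow; every name in it is an illustrative stand-in, not the real classes.

```python
import time
import uuid


class MiniTracker:
    """Toy stand-in for RunTracker/Tracer, illustrative only."""

    def __init__(self):
        self.runs = {}

    def start(self, node_name):
        run_id = f"{node_name}_{uuid.uuid4()}"
        self.runs[run_id] = {"node": node_name, "status": "Running", "start": time.time()}
        return run_id

    def end(self, run_id, result=None, error=None):
        info = self.runs[run_id]
        info.update(
            result=result,
            error=repr(error) if error else None,
            status="Failed" if error else "Completed",
            end=time.time(),
        )


def invoke_tool(tracker, node_name, f, kwargs):
    # Mirrors the shape of FlowExecutionContext.invoke_tool: start a node run,
    # call the tool, record the result or exception, and always persist at the end.
    run_id = tracker.start(node_name)
    try:
        result = f(**kwargs)
        tracker.end(run_id, result=result)
        return result
    except Exception as e:
        tracker.end(run_id, error=e)
        raise
    finally:
        print("persisted:", tracker.runs[run_id])


invoke_tool(MiniTracker(), "echo_node", lambda text: text.upper(), {"text": "hi"})
```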
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/__init__.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # ---------------------------------------------------------
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/tracer.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import functools import inspect import json import logging import uuid from collections.abc import Iterator from contextvars import ContextVar from datetime import datetime from typing import Callable, Optional, Dict from promptflow._core.generator_proxy import GeneratorProxy, generate_from_proxy from promptflow._utils.dataclass_serializer import serialize from promptflow._utils.multimedia_utils import default_json_encoder from promptflow.contracts.tool import ConnectionType from promptflow.contracts.trace import Trace, TraceType from .thread_local_singleton import ThreadLocalSingleton class Tracer(ThreadLocalSingleton): CONTEXT_VAR_NAME = "Tracer" context_var = ContextVar(CONTEXT_VAR_NAME, default=None) def __init__(self, run_id, node_name: Optional[str] = None): self._run_id = run_id self._node_name = node_name self._traces = [] self._current_trace_id = ContextVar("current_trace_id", default="") self._id_to_trace: Dict[str, Trace] = {} @classmethod def start_tracing(cls, run_id, node_name: Optional[str] = None): current_run_id = cls.current_run_id() if current_run_id is not None: msg = f"Try to start tracing for run {run_id} but {current_run_id} is already active." logging.warning(msg) return tracer = cls(run_id, node_name) tracer._activate_in_context() @classmethod def current_run_id(cls): tracer = cls.active_instance() if not tracer: return None return tracer._run_id @classmethod def end_tracing(cls, run_id: Optional[str] = None, raise_ex=False): tracer = cls.active_instance() if not tracer: msg = "Try end tracing but no active tracer in current context." if raise_ex: raise Exception(msg) logging.warning(msg) return [] if run_id is not None and tracer._run_id != run_id: msg = f"Try to end tracing for run {run_id} but {tracer._run_id} is active." logging.warning(msg) return [] tracer._deactivate_in_context() return tracer.to_json() @classmethod def push(cls, trace: Trace): obj = cls.active_instance() if not obj: logging.warning("Try to push trace but no active tracer in current context.") return obj._push(trace) @staticmethod def to_serializable(obj): if isinstance(obj, dict) and all(isinstance(k, str) for k in obj.keys()): return {k: Tracer.to_serializable(v) for k, v in obj.items()} if isinstance(obj, GeneratorProxy): return obj try: obj = serialize(obj) json.dumps(obj, default=default_json_encoder) except Exception: # We don't want to fail the whole function call because of a serialization error, # so we simply convert it to str if it cannot be serialized. 
obj = str(obj) return obj def _get_current_trace(self): trace_id = self._current_trace_id.get() if not trace_id: return None return self._id_to_trace[trace_id] def _push(self, trace: Trace): if not trace.id: trace.id = str(uuid.uuid4()) if trace.inputs: trace.inputs = self.to_serializable(trace.inputs) trace.children = [] if not trace.start_time: trace.start_time = datetime.utcnow().timestamp() parent_trace = self._get_current_trace() if not parent_trace: self._traces.append(trace) trace.node_name = self._node_name else: parent_trace.children.append(trace) trace.parent_id = parent_trace.id self._current_trace_id.set(trace.id) self._id_to_trace[trace.id] = trace @classmethod def pop(cls, output=None, error: Optional[Exception] = None): obj = cls.active_instance() return obj._pop(output, error) def _pop(self, output=None, error: Optional[Exception] = None): last_trace = self._get_current_trace() if not last_trace: logging.warning("Try to pop trace but no active trace in current context.") return output if isinstance(output, Iterator): output = GeneratorProxy(output) if output is not None: last_trace.output = self.to_serializable(output) if error is not None: last_trace.error = self._format_error(error) last_trace.end_time = datetime.utcnow().timestamp() self._current_trace_id.set(last_trace.parent_id) if isinstance(output, GeneratorProxy): return generate_from_proxy(output) else: return output def to_json(self) -> list: return serialize(self._traces) @staticmethod def _format_error(error: Exception) -> dict: return { "message": str(error), "type": type(error).__qualname__, } def _create_trace_from_function_call(f, *, args=[], kwargs={}, trace_type=TraceType.FUNCTION): """Initialize a trace object from a function call.""" sig = inspect.signature(f).parameters all_kwargs = {**{k: v for k, v in zip(sig.keys(), args)}, **kwargs} all_kwargs = { k: ConnectionType.serialize_conn(v) if ConnectionType.is_connection_value(v) else v for k, v in all_kwargs.items() } # TODO: put parameters in self to inputs for builtin tools all_kwargs.pop("self", None) return Trace( name=f.__qualname__, type=trace_type, start_time=datetime.utcnow().timestamp(), inputs=all_kwargs, children=[], ) def _traced(func: Callable = None, *, trace_type=TraceType.FUNCTION) -> Callable: """A wrapper to add trace to a function. When a function is wrapped by this wrapper, the function name, inputs, outputs, start time, end time, and error (if any) will be recorded. It can be used for both sync and async functions. For sync functions, it will return a sync function. For async functions, it will return an async function. :param func: The function to be traced. :type func: Callable :param trace_type: The type of the trace. Defaults to TraceType.FUNCTION. :type trace_type: TraceType, optional :return: The wrapped function with trace enabled. :rtype: Callable """ def create_trace(func, args, kwargs): return _create_trace_from_function_call(func, args=args, kwargs=kwargs, trace_type=trace_type) if inspect.iscoroutinefunction(func): @functools.wraps(func) async def wrapped(*args, **kwargs): if Tracer.active_instance() is None: return await func(*args, **kwargs) # Do nothing if no tracing is enabled. # Should not extract these codes to a separate function here. # We directly call func instead of calling Tracer.invoke, # because we want to avoid long stack trace when hitting an exception. 
try: Tracer.push(create_trace(func, args, kwargs)) output = await func(*args, **kwargs) return Tracer.pop(output) except Exception as e: Tracer.pop(None, e) raise else: @functools.wraps(func) def wrapped(*args, **kwargs): if Tracer.active_instance() is None: return func(*args, **kwargs) # Do nothing if no tracing is enabled. # Should not extract these codes to a separate function here. # We directly call func instead of calling Tracer.invoke, # because we want to avoid long stack trace when hitting an exception. try: Tracer.push(create_trace(func, args, kwargs)) output = func(*args, **kwargs) return Tracer.pop(output) except Exception as e: Tracer.pop(None, e) raise wrapped.__original_function = func return wrapped def trace(func: Callable = None) -> Callable: """A decorator to add trace to a function. When a function is wrapped by this decorator, the function name, inputs, outputs, start time, end time, and error (if any) will be recorded. It can be used for both sync and async functions. For sync functions, it will return a sync function. For async functions, it will return an async function. :param func: The function to be traced. :type func: Callable :return: The wrapped function with trace enabled. :rtype: Callable :Examples: Synchronous function usage: .. code-block:: python @trace def greetings(user_id): name = get_name(user_id) return f"Hello, {name}" Asynchronous function usage: .. code-block:: python @trace async def greetings_async(user_id): name = await get_name_async(user_id) return f"Hello, {name}" """ return _traced(func, trace_type=TraceType.FUNCTION)
0
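A usage sketch of the tracer above: traced functions only record anything while a tracer is active for a run, and nested calls end up as child traces. The run id and node name are arbitrary.

```python
from promptflow._core.tracer import Tracer, trace


@trace
def add(a, b):
    return a + b


@trace
def calculate(a, b):
    return add(a, b) * 2


# Without start_tracing, the decorated functions just run normally.
Tracer.start_tracing(run_id="run_1", node_name="calc_node")
result = calculate(1, 2)
traces = Tracer.end_tracing("run_1")

print(result)  # 6
# `traces` is a serialized list with one root trace for `calculate`
# that contains the nested `add` call as a child.
print(traces)
```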
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/tools_manager.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import importlib import importlib.util import inspect import logging import traceback import types from functools import partial from pathlib import Path from typing import Callable, Dict, List, Mapping, Optional, Tuple, Union from promptflow._core._errors import ( InputTypeMismatch, InvalidSource, MissingRequiredInputs, PackageToolNotFoundError, ToolLoadError, ) from promptflow._core.tool_meta_generator import ( _parse_tool_from_function, collect_tool_function_in_module, load_python_module_from_file, ) from promptflow._utils.connection_utils import ( generate_custom_strong_type_connection_spec, generate_custom_strong_type_connection_template, ) from promptflow._utils.tool_utils import ( _DEPRECATED_TOOLS, DynamicListError, RetrieveToolFuncResultError, _find_deprecated_tools, append_workspace_triple_to_func_input_params, function_to_tool_definition, get_prompt_param_name_from_func, load_function_from_function_path, validate_dynamic_list_func_response_type, validate_tool_func_result, ) from promptflow._utils.yaml_utils import load_yaml from promptflow.contracts.flow import InputAssignment, InputValueType, Node, ToolSourceType from promptflow.contracts.tool import ConnectionType, Tool, ToolType from promptflow.exceptions import ErrorTarget, SystemErrorException, UserErrorException, ValidationException module_logger = logging.getLogger(__name__) PACKAGE_TOOLS_ENTRY = "package_tools" def collect_tools_from_directory(base_dir) -> dict: tools = {} for f in Path(base_dir).glob("**/*.yaml"): with open(f, "r") as f: tools_in_file = load_yaml(f) for identifier, tool in tools_in_file.items(): tools[identifier] = tool return tools def _get_entry_points_by_group(group): # lazy load to improve performance for scenarios that don't need to load package tools import importlib.metadata # In python3.10 and later, the entry_points() method returns a SelectableView of EntryPoint objects, # which allows us to select entry points by group. In the previous versions, the entry_points() method # returns a dictionary-like object, we can use group name directly as a key. entry_points = importlib.metadata.entry_points() if isinstance(entry_points, list): return entry_points.select(group=group) else: return entry_points.get(group, []) def collect_package_tools(keys: Optional[List[str]] = None) -> dict: """Collect all tools from all installed packages.""" all_package_tools = {} if keys is not None: keys = set(keys) entry_points = _get_entry_points_by_group(PACKAGE_TOOLS_ENTRY) for entry_point in entry_points: try: list_tool_func = entry_point.load() package_tools = list_tool_func() for identifier, tool in package_tools.items(): # Only load required tools to avoid unnecessary loading when keys is provided if isinstance(keys, set) and identifier not in keys: # Support to collect new tool id if node source tool is a deprecated tool. 
deprecated_tool_ids = tool.get(_DEPRECATED_TOOLS, []) if not set(deprecated_tool_ids).intersection(keys): continue m = tool["module"] importlib.import_module(m) # Import the module to make sure it is valid tool["package"] = entry_point.dist.metadata["Name"] tool["package_version"] = entry_point.dist.version all_package_tools[identifier] = tool except Exception as e: msg = ( f"Failed to load tools from package {entry_point.dist.metadata['Name']}: {e}," + f" traceback: {traceback.format_exc()}" ) module_logger.warning(msg) return all_package_tools def collect_package_tools_and_connections(keys: Optional[List[str]] = None) -> dict: """Collect all tools and custom strong type connections from all installed packages.""" all_package_tools = {} all_package_connection_specs = {} all_package_connection_templates = {} if keys is not None: keys = set(keys) entry_points = _get_entry_points_by_group(PACKAGE_TOOLS_ENTRY) for entry_point in entry_points: try: list_tool_func = entry_point.load() package_tools = list_tool_func() for identifier, tool in package_tools.items(): # Only load required tools to avoid unnecessary loading when keys is provided if isinstance(keys, set) and identifier not in keys: continue m = tool["module"] module = importlib.import_module(m) # Import the module to make sure it is valid tool["package"] = entry_point.dist.metadata["Name"] tool["package_version"] = entry_point.dist.version all_package_tools[identifier] = tool # Get custom strong type connection definition custom_strong_type_connections_classes = [ obj for name, obj in inspect.getmembers(module) if inspect.isclass(obj) and ConnectionType.is_custom_strong_type(obj) and (not ConnectionType.is_connection_class_name(name)) ] if custom_strong_type_connections_classes: for cls in custom_strong_type_connections_classes: identifier = f"{cls.__module__}.{cls.__name__}" connection_spec = generate_custom_strong_type_connection_spec( cls, entry_point.dist.metadata["Name"], entry_point.dist.version ) all_package_connection_specs[identifier] = connection_spec all_package_connection_templates[identifier] = generate_custom_strong_type_connection_template( cls, connection_spec, entry_point.dist.metadata["Name"], entry_point.dist.version ) except Exception as e: msg = ( f"Failed to load tools from package {entry_point.dist.metadata['Name']}: {e}," + f" traceback: {traceback.format_exc()}" ) module_logger.warning(msg) return all_package_tools, all_package_connection_specs, all_package_connection_templates def retrieve_tool_func_result( func_call_scenario: str, func_path: str, func_input_params_dict: Dict, ws_triple_dict: Dict[str, str] = {} ): func = load_function_from_function_path(func_path) # get param names from func signature. func_sig_params = inspect.signature(func).parameters module_logger.warning(f"func_sig_params of func_path is: '{func_sig_params}'") module_logger.warning(f"func_input_params_dict is: '{func_input_params_dict}'") # Append workspace triple to func input params if func signature has kwargs param. # Or append ws_triple_dict params that are in func signature. 
combined_func_input_params = append_workspace_triple_to_func_input_params( func_sig_params, func_input_params_dict, ws_triple_dict ) try: result = func(**combined_func_input_params) except Exception as e: raise RetrieveToolFuncResultError(f"Error when calling function {func_path}: {e}") validate_tool_func_result(func_call_scenario, result) return result def gen_dynamic_list(func_path: str, func_input_params_dict: Dict, ws_triple_dict: Dict[str, str] = {}): func = load_function_from_function_path(func_path) # get param names from func signature. func_sig_params = inspect.signature(func).parameters module_logger.warning(f"func_sig_params of func_path is: '{func_sig_params}'") module_logger.warning(f"func_input_params_dict is: '{func_input_params_dict}'") combined_func_input_params = append_workspace_triple_to_func_input_params( func_sig_params, func_input_params_dict, ws_triple_dict ) try: result = func(**combined_func_input_params) except Exception as e: raise DynamicListError(f"Error when calling function {func_path}: {e}") # validate response is of required format. Throw correct message if response is empty. validate_dynamic_list_func_response_type(result, func.__name__) return result class BuiltinsManager: def __init__(self) -> None: pass @staticmethod def _load_llm_api(api_name: str) -> Tool: result = apis.get(api_name) if result is None: raise APINotFound( message=f"The API '{api_name}' is not found.", target=ErrorTarget.EXECUTOR, ) return result def load_builtin( self, tool: Tool, node_inputs: Optional[dict] = None, ) -> Tuple[Callable, dict]: return BuiltinsManager._load_package_tool(tool.name, tool.module, tool.class_name, tool.function, node_inputs) @staticmethod def _load_package_tool(tool_name, module_name, class_name, method_name, node_inputs): module = importlib.import_module(module_name) return BuiltinsManager._load_tool_from_module( module, tool_name, module_name, class_name, method_name, node_inputs ) @staticmethod def _load_tool_from_module( module, tool_name, module_name, class_name, method_name, node_inputs: Mapping[str, InputAssignment] ): """Load tool from given module with node inputs.""" if class_name is None: return getattr(module, method_name), {} provider_class = getattr(module, class_name) # Note: v -- type is InputAssignment init_inputs = provider_class.get_initialize_inputs() init_inputs_values = {} for k, v in node_inputs.items(): if k not in init_inputs: continue if v.value_type != InputValueType.LITERAL: raise InputTypeMismatch( message_format=( "Invalid input for '{tool_name}': Initialization input '{input_name}' requires a literal " "value, but {input_value} was received." 
), tool_name=tool_name, input_name=k, input_value=v.serialize(), target=ErrorTarget.EXECUTOR, ) init_inputs_values[k] = v.value missing_inputs = set(provider_class.get_required_initialize_inputs()) - set(init_inputs_values) if missing_inputs: raise MissingRequiredInputs( message=f"Required inputs {list(missing_inputs)} are not provided for tool '{tool_name}'.", target=ErrorTarget.EXECUTOR, ) try: api = getattr(provider_class(**init_inputs_values), method_name) except Exception as ex: error_type_and_message = f"({ex.__class__.__name__}) {ex}" raise ToolLoadError( module=module_name, message_format="Failed to load package tool '{tool_name}': {error_type_and_message}", tool_name=tool_name, error_type_and_message=error_type_and_message, ) from ex # Return the init_inputs to update node inputs in the afterward steps return api, init_inputs @staticmethod def load_tool_by_api_name(api_name: str) -> Tool: if api_name is None: return None return BuiltinsManager._load_llm_api(api_name) def load_prompt_with_api(self, tool: Tool, api: Tool, node_inputs: Optional[dict] = None) -> Tuple[Callable, dict]: """Load a prompt template tool with action.""" # Load provider action function api_func, init_inputs = self.load_builtin(api, node_inputs) # Find the prompt template parameter name and parse tool code to it. prompt_tpl_param_name = get_prompt_param_name_from_func(api_func) api_func = partial(api_func, **{prompt_tpl_param_name: tool.code}) if prompt_tpl_param_name else api_func # Return the init_inputs to update node inputs in the afterward steps return api_func, init_inputs def load_prompt_rendering(self, tool: Tool): if not tool.code: tool.code = "" from promptflow.tools.template_rendering import render_template_jinja2 return partial(render_template_jinja2, template=tool.code) @staticmethod def parse_builtin_tool_method(tool: Tool) -> tuple: module_name = tool.module class_name = tool.class_name method_name = tool.function return module_name, class_name, method_name @staticmethod def is_builtin(tool: Tool) -> bool: """Check if the tool is a builtin tool.""" return tool.type == ToolType.PYTHON and tool.code is None and tool.source is None @staticmethod def is_llm(tool: Tool) -> bool: """Check if the tool is a LLM tool.""" return tool.type == ToolType.LLM @staticmethod def is_custom_python(tool: Tool) -> bool: """Check if the tool is a custom python tool.""" return tool.type == ToolType.PYTHON and not BuiltinsManager.is_builtin(tool) class ToolsManager: """Manage all builtins and user-defined tools.""" def __init__( self, loaded_tools: Optional[Mapping[str, Callable]] = None, ) -> None: loaded_tools = loaded_tools or {} self._tools = {k: v for k, v in loaded_tools.items()} def load_tools(self, tools: Mapping[str, Callable]) -> None: """Load new tools to the manager.""" self._tools.update(tools) def loaded(self, tool: str) -> bool: return tool in self._tools def get_tool(self, key: str) -> Callable: if key not in self._tools: raise ValueError(f"Tool for {key} is not loaded") return self._tools[key] def wrap_tool(self, key: str, wrapper: Callable): """Wraps the tool with specific name by a given wrapper. Sometimes we may want to wrap the tool with a decorator, but we don't want to modify the original tool. i.e. We may want to pass additional arguments to the tool by wrapping it with a decorator, such as turning on the stream response for AzureOpenAI.chat() by adding a "stream=True" argument. 
""" tool = self.get_tool(key) self._tools.update({key: wrapper(tool)}) def assert_loaded(self, tool: str): if tool not in self._tools: raise ValueError(f"Tool {tool} is not loaded") # TODO: Remove this method. The code path will not be used in code-first experience. # Customers are familiar with the term "node", so we use it in error message. @staticmethod def _load_custom_tool(tool: Tool, node_name: str) -> Callable: func_name = tool.function or tool.name if tool.source and Path(tool.source).exists(): # If source file is provided, load the function from the file m = load_python_module_from_file(tool.source) if m is None: raise CustomToolSourceLoadError(f"Cannot load module from source {tool.source} for node {node_name}.") return getattr(m, func_name) if not tool.code: raise EmptyCodeInCustomTool(f"Missing code in node {node_name}.") func_code = tool.code try: f_globals = {} exec(func_code, f_globals) except Exception as e: raise CustomPythonToolLoadError(f"Error when loading code of node {node_name}: {e}") from e if func_name not in f_globals: raise MissingTargetFunction(f"Cannot find function {func_name} in the code of node {node_name}.") return f_globals[func_name] class ToolLoader: def __init__(self, working_dir: str, package_tool_keys: Optional[List[str]] = None) -> None: self._working_dir = working_dir self._package_tools = collect_package_tools(package_tool_keys) if package_tool_keys else {} # Used to handle backward compatibility of tool ID changes. self._deprecated_tools = _find_deprecated_tools(self._package_tools) # TODO: Replace NotImplementedError with NotSupported in the future. def load_tool_for_node(self, node: Node) -> Tool: if node.source is None: raise UserErrorException(f"Node {node.name} does not have source defined.") if node.type == ToolType.PYTHON: if node.source.type == ToolSourceType.Package: return self.load_tool_for_package_node(node) elif node.source.type == ToolSourceType.Code: _, tool = self.load_tool_for_script_node(node) return tool raise NotImplementedError(f"Tool source type {node.source.type} for python tool is not supported yet.") elif node.type == ToolType.CUSTOM_LLM: if node.source.type == ToolSourceType.PackageWithPrompt: return self.load_tool_for_package_node(node) raise NotImplementedError(f"Tool source type {node.source.type} for custom_llm tool is not supported yet.") else: raise NotImplementedError(f"Tool type {node.type} is not supported yet.") def load_tool_for_package_node(self, node: Node) -> Tool: if node.source.tool in self._package_tools: return Tool.deserialize(self._package_tools[node.source.tool]) # If node source tool is not in package tools, try to find the tool ID in deprecated tools. # If found, load the tool with the new tool ID for backward compatibility. if node.source.tool in self._deprecated_tools: new_tool_id = self._deprecated_tools[node.source.tool] # Used to collect deprecated tool usage and warn user to replace the deprecated tool with the new one. module_logger.warning(f"Tool ID '{node.source.tool}' is deprecated. Please use '{new_tool_id}' instead.") return Tool.deserialize(self._package_tools[new_tool_id]) raise PackageToolNotFoundError( f"Package tool '{node.source.tool}' is not found in the current environment. 
" f"All available package tools are: {list(self._package_tools.keys())}.", target=ErrorTarget.EXECUTOR, ) def load_tool_for_script_node(self, node: Node) -> Tuple[types.ModuleType, Tool]: if node.source.path is None: raise InvalidSource( target=ErrorTarget.EXECUTOR, message_format="Load tool failed for node '{node_name}'. The source path is 'None'.", node_name=node.name, ) path = node.source.path if not (self._working_dir / path).is_file(): raise InvalidSource( target=ErrorTarget.EXECUTOR, message_format="Load tool failed for node '{node_name}'. Tool file '{source_path}' can not be found.", source_path=path, node_name=node.name, ) m = load_python_module_from_file(self._working_dir / path) if m is None: raise CustomToolSourceLoadError(f"Cannot load module from {path}.") f, init_inputs = collect_tool_function_in_module(m) return m, _parse_tool_from_function(f, init_inputs, gen_custom_type_conn=True) def load_tool_for_llm_node(self, node: Node) -> Tool: api_name = f"{node.provider}.{node.api}" return BuiltinsManager._load_llm_api(api_name) builtins = {} apis = {} connections = {} connection_type_to_api_mapping = {} def _register(provider_cls, collection, type): from promptflow._core.tool import ToolProvider if not issubclass(provider_cls, ToolProvider): raise Exception(f"Class {provider_cls.__name__!r} must be a subclass of promptflow.ToolProvider.") initialize_inputs = provider_cls.get_initialize_inputs() # Build tool/provider definition for name, value in provider_cls.__dict__.items(): if hasattr(value, "__original_function"): name = value.__original_function.__qualname__ value.__tool = function_to_tool_definition(value, type=type, initialize_inputs=initialize_inputs) collection[name] = value.__tool module_logger.debug(f"Registered {name} as a builtin function") # Get the connection type - provider name mapping for execution use # Tools/Providers related connection must have been imported for param in initialize_inputs.values(): if not param.annotation: continue annotation_type_name = param.annotation.__name__ if annotation_type_name in connections: api_name = provider_cls.__name__ module_logger.debug(f"Add connection type {annotation_type_name} to api {api_name} mapping") connection_type_to_api_mapping[annotation_type_name] = api_name break def _register_method(provider_method, collection, type): name = provider_method.__qualname__ provider_method.__tool = function_to_tool_definition(provider_method, type=type) collection[name] = provider_method.__tool module_logger.debug(f"Registered {name} as {type} function") def register_builtins(provider_cls): _register(provider_cls, builtins, ToolType.PYTHON) def register_apis(provider_cls): _register(provider_cls, apis, ToolType._ACTION) def register_builtin_method(provider_method): _register_method(provider_method, builtins, ToolType.PYTHON) def register_api_method(provider_method): _register_method(provider_method, apis, ToolType._ACTION) def register_connections(connection_classes: Union[type, List[type]]): connection_classes = [connection_classes] if not isinstance(connection_classes, list) else connection_classes connections.update({cls.__name__: cls for cls in connection_classes}) class CustomToolSourceLoadError(SystemErrorException): pass class CustomToolError(UserErrorException): """Base exception raised when failed to validate tool.""" def __init__(self, message): super().__init__(message, target=ErrorTarget.TOOL) class EmptyCodeInCustomTool(CustomToolError): pass class CustomPythonToolLoadError(CustomToolError): pass class 
MissingTargetFunction(CustomToolError): pass class APINotFound(ValidationException): pass
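# --- Illustrative usage sketch (not part of the original source) ------------
# A minimal example of registering a connection class and constructing a
# ToolLoader, assuming the promptflow package is installed. The package tool
# key "my_package.my_tool" is hypothetical.
from pathlib import Path

from promptflow._core.tools_manager import ToolLoader, register_connections
from promptflow.connections import CustomConnection

# Make CustomConnection resolvable when tool signatures annotate inputs with it.
register_connections([CustomConnection])

# Only collect metadata for the package tools a flow actually references.
loader = ToolLoader(working_dir=Path("."), package_tool_keys=["my_package.my_tool"])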
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/operation_context.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- from contextvars import ContextVar from typing import Dict, Mapping from promptflow._version import VERSION class OperationContext(Dict): """The OperationContext class. This class is used to store the context information for the current operation. It is a dictionary-like class that can be used to store any primitive context information. The object is a context variable that can be accessed from anywhere in the current context. The context information is used to provide additional information to the service for logging and telemetry purposes. """ _CONTEXT_KEY = "operation_context" _current_context = ContextVar(_CONTEXT_KEY, default=None) USER_AGENT_KEY = "user_agent" @classmethod def get_instance(cls): """Get the OperationContext instance. This method returns the OperationContext instance from the current context. If there is no instance in the current context, it creates a new one and sets it in the current context. Returns: OperationContext: The OperationContext instance. """ # get the OperationContext instance from the current context instance = cls._current_context.get() if instance is None: # create a new instance and set it in the current context instance = OperationContext() cls._current_context.set(instance) return instance def __setattr__(self, name, value): """Set the attribute. This method sets an attribute with the given name and value in the OperationContext instance. The name must be a string and the value must be a primitive. Args: name (str): The name of the attribute. value (int, float, str, bool, or None): The value of the attribute. Raises: TypeError: If name is not a string or value is not a primitive. """ # check that name is a string if not isinstance(name, str): raise TypeError("Name must be a string") # check that value is a primitive if value is not None and not isinstance(value, (int, float, str, bool)): raise TypeError("Value must be a primitive") # set the item in the data attribute self[name] = value def __getattr__(self, name): """Get the attribute. This method returns the attribute with the given name from the OperationContext instance. If there is no such attribute, it returns the default attribute from the super class. Args: name (str): The name of the attribute. Returns: int, float, str, bool, or None: The value of the attribute. """ if name in self: return self[name] else: super().__getattribute__(name) def __delattr__(self, name): """Delete the attribute. This method deletes the attribute with the given name from the OperationContext instance. If there is no such attribute, it deletes the default attribute from the super class. Args: name (str): The name of the attribute. """ if name in self: del self[name] else: super().__delattr__(name) def get_user_agent(self): """Get the user agent string. This method returns the user agent string for the OperationContext instance. The user agent string consists of the promptflow-sdk version and any additional user agent information stored in the user_agent attribute. Returns: str: The user agent string. 
""" def parts(): if OperationContext.USER_AGENT_KEY in self: yield self.get(OperationContext.USER_AGENT_KEY) yield f"promptflow/{VERSION}" # strip to avoid leading or trailing spaces, which may cause error when sending request ua = " ".join(parts()).strip() return ua def append_user_agent(self, user_agent: str): """Append the user agent string. This method appends user agent information to the user_agent attribute of the OperationContext instance. If there is no user_agent attribute, it creates one with the given user agent information. Args: user_agent (str): The user agent information to append. """ if OperationContext.USER_AGENT_KEY in self: if user_agent not in self.user_agent: self.user_agent = f"{self.user_agent.strip()} {user_agent.strip()}" else: self.user_agent = user_agent def set_batch_input_source_from_inputs_mapping(self, inputs_mapping: Mapping[str, str]): """Infer the batch input source from the input mapping and set it in the OperationContext instance. This method analyzes the `inputs_mapping` to ascertain the origin of the inputs for a batch operation. The `inputs_mapping` should be a dictionary with keys representing input names and values specifying the sources of these inputs. Inputs can originate from direct data or from the outputs of a previous run. The `inputs_mapping` is dictated entirely by the external caller. For more details on column mapping, refer to https://aka.ms/pf/column-mapping. The mapping can include references to both the inputs and outputs of previous runs, using a reserved source name 'run' to indicate such references. However, this method specifically checks for references to outputs of previous runs, which are denoted by values starting with "${run.outputs". When such a reference is found, the `batch_input_source` attribute of the OperationContext instance is set to "Run" to reflect that the batch operation is utilizing outputs from a prior run. If no values in the `inputs_mapping` start with "${run.outputs", it is inferred that the inputs do not derive from a previous run, and the `batch_input_source` is set to "Data". Examples of `inputs_mapping`: - Referencing a previous run's output: {'input1': '${run.outputs.some_output}', 'input2': 'direct_data'} In this case, 'input1' is sourced from a prior run's output, and 'input2' is from direct data. The `batch_input_source` would be set to "Run". - Sourcing directly from data: {'input1': 'data_source1', 'input2': 'data_source2'} Since no values start with "${run.outputs", the `batch_input_source` is set to "Data". Args: inputs_mapping (Mapping[str, str]): A dictionary mapping input names to their sources, where the sources can be either direct data or outputs from a previous run. The structure and content of this mapping are entirely under the control of the external caller. Returns: None """ if inputs_mapping and any( isinstance(value, str) and value.startswith("${run.outputs") for value in inputs_mapping.values() ): self.batch_input_source = "Run" else: self.batch_input_source = "Data" def get_context_dict(self): """Get the context dictionary. This method returns the context dictionary for the OperationContext instance. The context dictionary is a dictionary that contains all the context information stored in the OperationContext instance. Returns: dict: The context dictionary. """ return dict(self)
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/thread_local_singleton.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------

from contextvars import ContextVar
from typing import Type, TypeVar

T = TypeVar("T")


class ThreadLocalSingleton:
    # Use context variable to enable thread local singleton
    # See reference: https://docs.python.org/3/library/contextvars.html#contextvars.ContextVar
    CONTEXT_VAR_NAME = "ThreadLocalSingleton"
    context_var = ContextVar(CONTEXT_VAR_NAME, default=None)

    @classmethod
    def active_instance(cls: Type[T]) -> T:
        return cls.context_var.get()

    @classmethod
    def active(cls) -> bool:
        return cls.active_instance() is not None

    def _activate_in_context(self, force=False):
        instance = self.active_instance()
        if instance is not None and instance is not self and not force:
            raise NotImplementedError(f"Cannot set active since there is another active instance: {instance}")
        self.context_var.set(self)

    def _deactivate_in_context(self):
        self.context_var.set(None)
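# --- Illustrative usage sketch (not part of the original source) ------------
# A subclass is expected to declare its own ContextVar so that each singleton
# type tracks its own active instance; "MyTracker" is a hypothetical class.
from contextvars import ContextVar

from promptflow._core.thread_local_singleton import ThreadLocalSingleton


class MyTracker(ThreadLocalSingleton):
    CONTEXT_VAR_NAME = "MyTracker"
    context_var = ContextVar(CONTEXT_VAR_NAME, default=None)


tracker = MyTracker()
tracker._activate_in_context()
assert MyTracker.active() and MyTracker.active_instance() is tracker
tracker._deactivate_in_context()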
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/generator_proxy.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------


class GeneratorProxy:
    """A proxy for generator that can record all items that have been yielded from the generator."""

    def __init__(self, generator):
        self._generator = generator
        self._items = []

    def __iter__(self):
        return self

    def __next__(self):
        item = next(self._generator)
        self._items.append(item)
        return item

    @property
    def items(self):
        return self._items


def generate_from_proxy(proxy: GeneratorProxy):
    yield from proxy
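# --- Illustrative usage sketch (not part of the original source) ------------
# Wraps a generator so that yielded items are recorded while still being
# streamed to the consumer.
from promptflow._core.generator_proxy import GeneratorProxy, generate_from_proxy

proxy = GeneratorProxy(iter(["a", "b", "c"]))
streamed = list(generate_from_proxy(proxy))
assert streamed == ["a", "b", "c"]
assert proxy.items == ["a", "b", "c"]  # every yielded item was captured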
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/_errors.py
from traceback import TracebackException from promptflow._utils.exception_utils import ( ADDITIONAL_INFO_USER_EXECUTION_ERROR, is_pf_core_frame, last_frame_info, remove_suffix, ) from promptflow.exceptions import ErrorTarget, SystemErrorException, UserErrorException, ValidationException class UnexpectedError(SystemErrorException): """Exception raised for unexpected errors that should not occur under normal circumstances.""" pass class NotSupported(UserErrorException): """This exception should be raised when a feature is not supported by the package or product. Customers should take action, such as upgrading the package or using the product in the correct way, to resolve it. """ pass class PackageToolNotFoundError(ValidationException): """Exception raised when package tool is not found in the current runtime environment.""" pass class MissingRequiredInputs(ValidationException): pass class InputTypeMismatch(ValidationException): pass class ToolCanceledError(UserErrorException): """Exception raised when tool execution is canceled.""" pass class InvalidSource(ValidationException): pass class ToolLoadError(UserErrorException): """Exception raised when tool load failed.""" def __init__(self, module: str = None, **kwargs): super().__init__(target=ErrorTarget.TOOL, module=module, **kwargs) class ToolExecutionError(UserErrorException): """Exception raised when tool execution failed.""" def __init__(self, *, node_name: str, module: str = None): self._node_name = node_name super().__init__(target=ErrorTarget.TOOL, module=module) @property def message(self): if self.inner_exception: error_type_and_message = f"({self.inner_exception.__class__.__name__}) {self.inner_exception}" return remove_suffix(self._message, ".") + f": {error_type_and_message}" else: return self._message @property def message_format(self): return "Execution failure in '{node_name}'." @property def message_parameters(self): return {"node_name": self._node_name} @property def tool_last_frame_info(self): """Return the line number inside the tool where the error occurred.""" return last_frame_info(self.inner_exception) @property def tool_traceback(self): """Return the traceback inside the tool's source code scope. The traceback inside the promptflow's internal code will be taken off. """ exc = self.inner_exception if exc and exc.__traceback__ is not None: tb = exc.__traceback__.tb_next if tb is not None: # The first frames are always our code invoking the tool. # We do not want to dump it to user code's traceback. # So, skip these frames from pf core module. while is_pf_core_frame(tb.tb_frame) and tb.tb_next is not None: tb = tb.tb_next # We don't use traceback.format_exception since its interface differs between 3.8 and 3.10. # Use this internal class to adapt to different python versions. te = TracebackException(type(exc), exc, tb) formatted_tb = "".join(te.format()) return formatted_tb return None @property def additional_info(self): """Set the tool exception details as additional info.""" if not self.inner_exception: # Only populate additional info when inner exception is present. 
return None info = { "type": self.inner_exception.__class__.__name__, "message": str(self.inner_exception), "traceback": self.tool_traceback, } info.update(self.tool_last_frame_info) return { ADDITIONAL_INFO_USER_EXECUTION_ERROR: info, } class GenerateMetaUserError(UserErrorException): """Base exception raised when failed to validate tool.""" def __init__(self, **kwargs): super().__init__(target=ErrorTarget.EXECUTOR, **kwargs) class MetaFileNotFound(GenerateMetaUserError): pass class MetaFileReadError(GenerateMetaUserError): pass class RunRecordNotFound(SystemErrorException): pass class FlowOutputUnserializable(UserErrorException): pass class ProcessPoolError(SystemErrorException): pass class DuplicateToolMappingError(ValidationException): """Exception raised when multiple tools are linked to the same deprecated tool id.""" pass
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/connection_manager.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import copy import json import os from dataclasses import fields, is_dataclass from pathlib import Path from typing import Any, Dict, List from promptflow._constants import CONNECTION_NAME_PROPERTY, CONNECTION_SECRET_KEYS, PROMPTFLOW_CONNECTIONS from promptflow._sdk._constants import CustomStrongTypeConnectionConfigs from promptflow._utils.utils import try_import from promptflow.contracts.tool import ConnectionType from promptflow.contracts.types import Secret class ConnectionManager: """This class will be used for construction mode to run flow. Do not include it into tool code.""" instance = None def __init__(self, _dict: Dict[str, dict] = None): if _dict is None and PROMPTFLOW_CONNECTIONS in os.environ: # !!! Important !!!: Do not leverage this environment variable in any production code, this is test only. if PROMPTFLOW_CONNECTIONS not in os.environ: raise ValueError(f"Required environment variable {PROMPTFLOW_CONNECTIONS!r} not set.") connection_path = Path(os.environ[PROMPTFLOW_CONNECTIONS]).resolve().absolute() if not connection_path.exists(): raise ValueError(f"Connection file not exists. Path {connection_path.as_posix()}.") _dict = json.loads(open(connection_path).read()) self._connections_dict = _dict or {} self._connections = self._build_connections(self._connections_dict) @classmethod def _build_connections(cls, _dict: Dict[str, dict]): """Build connection dict.""" from promptflow._core.tools_manager import connections as cls_mapping cls.import_requisites(_dict) connections = {} # key to connection object for key, connection_dict in _dict.items(): typ = connection_dict.get("type") if typ not in cls_mapping: supported = [key for key in cls_mapping.keys() if not key.startswith("_")] raise ValueError(f"Unknown connection {key!r} type {typ!r}, supported are {supported}.") value = connection_dict.get("value", {}) connection_class = cls_mapping[typ] from promptflow.connections import CustomConnection if connection_class is CustomConnection: # Note: CustomConnection definition can not be got, secret keys will be provided in connection dict. secret_keys = connection_dict.get("secret_keys", []) secrets = {k: v for k, v in value.items() if k in secret_keys} configs = {k: v for k, v in value.items() if k not in secrets} connection_value = connection_class(configs=configs, secrets=secrets) if CustomStrongTypeConnectionConfigs.PROMPTFLOW_TYPE_KEY in configs: connection_value.custom_type = configs[CustomStrongTypeConnectionConfigs.PROMPTFLOW_TYPE_KEY] else: """ Note: Ignore non exists keys of connection class, because there are some keys just used by UX like resource id, while not used by backend. """ if is_dataclass(connection_class): # Do not delete this branch, as promptflow_vectordb.connections is dataclass type. 
cls_fields = {f.name: f for f in fields(connection_class)} connection_value = connection_class(**{k: v for k, v in value.items() if k in cls_fields}) secret_keys = [f.name for f in cls_fields.values() if f.type == Secret] else: connection_value = connection_class(**{k: v for k, v in value.items()}) secrets = getattr(connection_value, "secrets", {}) secret_keys = list(secrets.keys()) if isinstance(secrets, dict) else [] # Set secret keys for log scrubbing setattr(connection_value, CONNECTION_SECRET_KEYS, secret_keys) # Use this hack to make sure serialization works setattr(connection_value, CONNECTION_NAME_PROPERTY, key) connections[key] = connection_value return connections @classmethod def init_from_env(cls): return ConnectionManager() def get(self, connection_info: Any) -> Any: """Get Connection by connection info. connection_info: connection name as string or connection object """ if isinstance(connection_info, str): return self._connections.get(connection_info) elif ConnectionType.is_connection_value(connection_info): return connection_info return None def get_secret_list(self) -> List[str]: def secrets(): for connection in self._connections.values(): secret_keys = getattr(connection, CONNECTION_SECRET_KEYS, []) for secret_key in secret_keys: yield getattr(connection, secret_key) return list(secrets()) @classmethod def import_requisites(cls, _dict: Dict[str, dict]): """Import connection required modules.""" modules = set() for key, connection_dict in _dict.items(): module = connection_dict.get("module") if module: modules.add(module) for module in modules: # Suppress import error, as we have legacy module promptflow.tools.connections. try_import(module, f"Import connection module {module!r} failed.", raise_error=False) @staticmethod def is_legacy_connections(_dict: Dict[str, dict]): """Detect if is legacy connections. Legacy connections dict doesn't have module and type. So import requisites can not be performed. Only request from MT will hit this. Legacy connection example: {"aoai_config": {"api_key": "..."}} """ has_module = any(isinstance(v, dict) and "module" in v for k, v in _dict.items()) return not has_module def to_connections_dict(self) -> dict: """Get all connections and reformat to key-values format.""" # Value returned: {"aoai_config": {"api_key": "..."}} return copy.deepcopy(self._connections_dict)
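# --- Illustrative usage sketch (not part of the original source) ------------
# Builds a ConnectionManager from an in-memory dict mirroring the (test-only)
# PROMPTFLOW_CONNECTIONS file format. The connection name, keys and values are
# hypothetical, and CustomConnection is assumed to be registered in the
# connection class mapping.
from promptflow._core.connection_manager import ConnectionManager

connections_dict = {
    "my_custom_connection": {
        "type": "CustomConnection",
        "module": "promptflow.connections",
        "value": {"api_key": "fake-key", "endpoint": "https://example.invalid"},
        "secret_keys": ["api_key"],
    }
}
manager = ConnectionManager(connections_dict)
conn = manager.get("my_custom_connection")  # -> CustomConnection instance
print(manager.get_secret_list())            # -> ["fake-key"]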
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/cache_manager.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import hashlib import json from dataclasses import dataclass from typing import Callable, List from promptflow._utils.logger_utils import flow_logger from promptflow.contracts.run_info import RunInfo from promptflow.storage import AbstractCacheStorage, AbstractRunStorage PROMPTFLOW_HASH_ATTR = "__promptflow_hash_func" def get_calculate_cache_func(tool_func): return getattr(tool_func, PROMPTFLOW_HASH_ATTR, None) def set_calculate_cache_func(tool_func, calculate_cache_func): setattr(tool_func, PROMPTFLOW_HASH_ATTR, calculate_cache_func) def enable_cache(calculate_cache_func): def decorator_enable_cache(func): set_calculate_cache_func(func, calculate_cache_func) return func return decorator_enable_cache @dataclass class CacheInfo: hash_id: str = None cache_string: str = None @dataclass class CacheResult: result: object = None cached_run_id: str = None cached_flow_run_id: str = None hit_cache: bool = False class AbstractCacheManager: @staticmethod def init_from_env() -> "AbstractCacheManager": # TODO: Return CacheManager after local execution is enabled. return DummyCacheManager() def calculate_cache_info(self, flow_id: str, tool_method: Callable, args, kwargs) -> CacheInfo: raise NotImplementedError("AbstractCacheManager has not implemented method calculate_cache_info.") def get_cache_result(self, cache_info: CacheInfo) -> CacheResult: raise NotImplementedError("AbstractCacheManager has not implemented method get_cache_result.") def persist_result(self, run_info: RunInfo, hash_id: str, cache_string: str, flow_id: str): raise NotImplementedError("AbstractCacheManager has not implemented method persist_result.") class DummyCacheManager(AbstractCacheManager): def __init__(self): pass def calculate_cache_info(self, flow_id: str, tool_method: Callable, args, kwargs) -> CacheInfo: return None def get_cache_result(self, cache_info: CacheInfo) -> CacheResult: return None def persist_result(self, run_info: RunInfo, hash_id: str, cache_string: str, flow_id: str): pass class CacheManager(AbstractCacheManager): def __init__(self, run_storage: AbstractRunStorage, cache_storage: AbstractCacheStorage): self._run_storage = run_storage self._cache_storage = cache_storage def calculate_cache_info(self, flow_id: str, tool_method: Callable, args, kwargs) -> CacheInfo: cache_function = get_calculate_cache_func(tool_method) # Cache function is not registered with this tool. if cache_function is None: return None # Calculate cache string and hash id. try: cache_string = cache_function(*args, **kwargs) except Exception as ex: flow_logger.warning(f"Failed to calculate cache string. Exception: {ex}") return None # Add flow_id and tool_name in the cache string. # So that different flow_id and tool_name cannot reuse. other_cache_string = json.dumps( { "flow_id": flow_id, "tool_name": tool_method.__qualname__, } ) cache_string += other_cache_string hash_id = self._calculate_hash_id(cache_string) return CacheInfo(hash_id=hash_id, cache_string=cache_string) def get_cache_result(self, cache_info: CacheInfo) -> CacheResult: hash_id = cache_info.hash_id # Query if cache result existed by hash_id. cache_result_list: List[CacheInfo] = self._cache_storage.get_cache_record_list(hash_id=hash_id) if len(cache_result_list) == 0: return None # Get the latest cache result. 
cache_result = sorted(cache_result_list, reverse=True, key=lambda i: i.end_time)[0] try: cached_run_info = self._run_storage.get_node_run(cache_result.run_id) except Exception as ex: flow_logger.warning( f"Failed to get cached run result. \ Run id:{cached_run_info.run_id}, flow run id: {cached_run_info.flow_run_id} \ Exception: {ex}" ) return None flow_logger.info( f"Hit cached result of previous run: run id: \ {cached_run_info.run_id}, flow run id: {cached_run_info.flow_run_id}" ) return CacheResult( result=cached_run_info.result, cached_run_id=cached_run_info.run_id, cached_flow_run_id=cached_run_info.flow_run_id, hit_cache=True, ) def persist_result(self, run_info: RunInfo, cache_info: CacheInfo, flow_id: str): self._cache_storage.persist_cache_result(run_info, cache_info.hash_id, cache_info.cache_string, flow_id) @staticmethod def _calculate_hash_id(cache_string: str): return hashlib.sha1(cache_string.encode("utf-8")).hexdigest()
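# --- Illustrative usage sketch (not part of the original source) ------------
# Attaches a cache-key function to a tool via enable_cache; the function bodies
# are hypothetical.
from promptflow._core.cache_manager import enable_cache, get_calculate_cache_func


def calculate_cache_key(text: str) -> str:
    return text.strip().lower()


@enable_cache(calculate_cache_key)
def summarize(text: str) -> str:
    return text[:10]


assert get_calculate_cache_func(summarize) is calculate_cache_key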
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/tool.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import inspect import logging from abc import ABC from dataclasses import InitVar, asdict, dataclass, field from enum import Enum from typing import Callable, Dict, List, Optional, Union from promptflow._core.tracer import _traced from promptflow.contracts.trace import TraceType module_logger = logging.getLogger(__name__) STREAMING_OPTION_PARAMETER_ATTR = "_streaming_option_parameter" # copied from promptflow.contracts.tool import ToolType class ToolType(str, Enum): LLM = "llm" PYTHON = "python" CSHARP = "csharp" PROMPT = "prompt" _ACTION = "action" CUSTOM_LLM = "custom_llm" class ToolInvoker(ABC): _active_tool_invoker: Optional["ToolInvoker"] = None def invoke_tool(self, f, *args, **kwargs): raise NotImplementedError() @classmethod def activate(cls, tool_invoker: "ToolInvoker"): cls._active_tool_invoker = tool_invoker @classmethod def deactivate(cls): cls._active_tool_invoker = None @classmethod def active_instance(cls) -> Optional["ToolInvoker"]: return cls._active_tool_invoker def tool( func=None, *, name: str = None, description: str = None, type: str = None, input_settings=None, streaming_option_parameter: Optional[str] = None, **kwargs, ) -> Callable: """Decorator for tool functions. The decorated function will be registered as a tool and can be used in a flow. :param name: The tool name. :type name: str :param description: The tool description. :type description: str :param type: The tool type. :type type: str :param input_settings: Dict of input setting. :type input_settings: Dict[str, promptflow.entities.InputSetting] :return: The decorated function. :rtype: Callable """ def tool_decorator(func: Callable) -> Callable: from promptflow.exceptions import UserErrorException if type is not None and type not in [k.value for k in ToolType]: raise UserErrorException(f"Tool type {type} is not supported yet.") # Calls to tool functions should be traced automatically. new_f = _traced(func, trace_type=TraceType.TOOL) new_f.__tool = None # This will be set when generating the tool definition. new_f.__name = name new_f.__description = description new_f.__type = type new_f.__input_settings = input_settings new_f.__extra_info = kwargs if streaming_option_parameter and isinstance(streaming_option_parameter, str): setattr(new_f, STREAMING_OPTION_PARAMETER_ATTR, streaming_option_parameter) return new_f # enable use decorator without "()" if all arguments are default values if func is not None: return tool_decorator(func) return tool_decorator def parse_all_args(argnames, args, kwargs) -> dict: """Parse args + kwargs to kwargs.""" all_args = {name: value for name, value in zip(argnames, args)} all_args.update(kwargs) return all_args class ToolProvider(ABC): """The base class of tool class.""" _initialize_inputs = None _required_initialize_inputs = None _instance_init_params = None def __new__(cls, *args, **kwargs): # Record the init parameters, use __new__ so that user doesn't need to # repeat parameters when calling super().__init__() cls._instance_init_params = parse_all_args(cls.get_initialize_inputs().keys(), args, kwargs) return super(ToolProvider, cls).__new__(cls) def __init__(self): """ Define the base inputs of each tool. All the parameters of __init__ will be added to inputs of each @tool in the class. 
""" self._init_params = self._instance_init_params @classmethod def get_initialize_inputs(cls): if not cls._initialize_inputs: cls._initialize_inputs = { k: v for k, v in inspect.signature(cls.__init__).parameters.items() if k != "self" } return cls._initialize_inputs @classmethod def get_required_initialize_inputs(cls): if not cls._required_initialize_inputs: cls._required_initialize_inputs = { k: v for k, v in inspect.signature(cls.__init__).parameters.items() if k != "self" and v.default is inspect.Parameter.empty } return cls._required_initialize_inputs @dataclass class DynamicList: function: InitVar[Union[str, Callable]] """The dynamic list function.""" input_mapping: InitVar[Dict] = None """The mapping between dynamic list function inputs and tool inputs.""" func_path: str = field(init=False) func_kwargs: List = field(init=False) def __post_init__(self, function, input_mapping): from promptflow._sdk._constants import SKIP_FUNC_PARAMS from promptflow._utils.tool_utils import _get_function_path, function_to_interface self._func_obj, self.func_path = _get_function_path(function) self._input_mapping = input_mapping or {} dynamic_list_func_inputs, _, _, _ = function_to_interface( self._func_obj, gen_custom_type_conn=True, skip_prompt_template=True ) # Get function input info self.func_kwargs = [] inputs = inspect.signature(self._func_obj).parameters for name, value in dynamic_list_func_inputs.items(): if name not in SKIP_FUNC_PARAMS: input_info = {"name": name} input_info.update(asdict(value, dict_factory=lambda x: {k: v for (k, v) in x if v})) if name in self._input_mapping: input_info["reference"] = f"${{inputs.{self._input_mapping[name]}}}" input_info["optional"] = inputs[name].default is not inspect.Parameter.empty if input_info["optional"]: input_info["default"] = inputs[name].default self.func_kwargs.append(input_info) @dataclass class GeneratedBy: """Settings of the generated by""" function: InitVar[Union[str, Callable]] """The generated by function.""" reverse_function: InitVar[Union[str, Callable]] """The reverse generated by function.""" input_settings: InitVar[Dict[str, object]] = None """The input settings of generated by function.""" func_path: str = field(init=False) func_kwargs: List = field(init=False) reverse_func_path: str = field(init=False) def __post_init__(self, function, reverse_function, input_settings): from promptflow._sdk._constants import SKIP_FUNC_PARAMS, UIONLY_HIDDEN from promptflow._utils.tool_utils import _get_function_path, function_to_interface self._func_obj, self.func_path = _get_function_path(function=function) self._reverse_func_obj, self.reverse_func_path = _get_function_path(function=reverse_function) self._input_settings = {} generated_func_inputs, _, _, _ = function_to_interface( self._func_obj, gen_custom_type_conn=True, skip_prompt_template=True ) # Get function input info self.func_kwargs = [] func_inputs = inspect.signature(self._func_obj).parameters for name, value in generated_func_inputs.items(): if name not in SKIP_FUNC_PARAMS: # Update kwargs in generated_by settings input_info = {"name": name} input_info.update(asdict(value, dict_factory=lambda x: {k: v for (k, v) in x if v})) input_info["reference"] = f"${{inputs.{name}}}" input_info["optional"] = func_inputs[name].default is not inspect.Parameter.empty self.func_kwargs.append(input_info) # Generated generated_by input settings in tool func if name in input_settings: self._input_settings[name] = asdict( input_settings[name], dict_factory=lambda x: {k: v for (k, v) in x if v} ) if 
"type" in input_info: self._input_settings[name]["type"] = input_info["type"] self._input_settings[name]["input_type"] = UIONLY_HIDDEN @dataclass class InputSetting: """Settings of the tool input""" is_multi_select: bool = None """Allow user to select multiple values.""" allow_manual_entry: bool = None """Allow user to enter input value manually.""" enabled_by: str = None """The input field which must be an enum type, that controls the visibility of the dependent input field.""" enabled_by_value: List = None """Defines the accepted enum values from the enabled_by field that will make this dependent input field visible.""" dynamic_list: DynamicList = None """Settings of dynamic list function.""" generated_by: GeneratedBy = None """Settings of generated by function."""
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/tool_meta_generator.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- """ This file can generate a meta file for the given prompt template or a python file. """ import importlib.util import inspect import json import re import types from dataclasses import asdict from pathlib import Path from traceback import TracebackException from jinja2 import TemplateSyntaxError from jinja2.environment import COMMENT_END_STRING, COMMENT_START_STRING from promptflow._core._errors import MetaFileNotFound, MetaFileReadError, NotSupported from promptflow._core.tool import ToolProvider from promptflow._utils.exception_utils import ADDITIONAL_INFO_USER_CODE_STACKTRACE, get_tb_next, last_frame_info from promptflow._utils.tool_utils import function_to_interface, get_inputs_for_prompt_template from promptflow.contracts.tool import Tool, ToolType from promptflow.exceptions import ErrorTarget, UserErrorException PF_MAIN_MODULE_NAME = "__pf_main__" def asdict_without_none(obj): return asdict(obj, dict_factory=lambda x: {k: v for (k, v) in x if v}) def generate_prompt_tool(name, content, prompt_only=False, source=None): """Generate meta for a single jinja template file.""" # Get all the variable name from a jinja template tool_type = ToolType.PROMPT if prompt_only else ToolType.LLM try: inputs = get_inputs_for_prompt_template(content) except TemplateSyntaxError as e: error_type_and_message = f"({e.__class__.__name__}) {e}" raise JinjaParsingError( message_format=( "Generate tool meta failed for {tool_type} tool. Jinja parsing failed at line {line_number}: " "{error_type_and_message}" ), tool_type=tool_type.value, line_number=e.lineno, error_type_and_message=error_type_and_message, ) from e except Exception as e: error_type_and_message = f"({e.__class__.__name__}) {e}" raise JinjaParsingError( message_format=( "Generate tool meta failed for {tool_type} tool. Jinja parsing failed: {error_type_and_message}" ), tool_type=tool_type.value, error_type_and_message=error_type_and_message, ) from e pattern = f"{COMMENT_START_STRING}(((?!{COMMENT_END_STRING}).)*){COMMENT_END_STRING}" match_result = re.match(pattern, content) description = match_result.groups()[0].strip() if match_result else None # Construct the Tool structure tool = Tool( name=name, description=description, type=tool_type, inputs=inputs, outputs={}, ) if source is None: tool.code = content else: tool.source = source return tool def generate_prompt_meta_dict(name, content, prompt_only=False, source=None): return asdict_without_none(generate_prompt_tool(name, content, prompt_only, source)) def is_tool(f): if not isinstance(f, types.FunctionType): return False if not hasattr(f, "__tool"): return False return True def collect_tool_functions_in_module(m): tools = [] for _, obj in inspect.getmembers(m): if is_tool(obj): # Note that the tool should be in defined in exec but not imported in exec, # so it should also have the same module with the current function. 
if getattr(obj, "__module__", "") != m.__name__: continue tools.append(obj) return tools def collect_tool_methods_in_module(m): tools = [] for _, obj in inspect.getmembers(m): if isinstance(obj, type) and issubclass(obj, ToolProvider) and obj.__module__ == m.__name__: for _, method in inspect.getmembers(obj): if is_tool(method): tools.append(method) return tools def collect_tool_methods_with_init_inputs_in_module(m): tools = [] for _, obj in inspect.getmembers(m): if isinstance(obj, type) and issubclass(obj, ToolProvider) and obj.__module__ == m.__name__: for _, method in inspect.getmembers(obj): if is_tool(method): tools.append((method, obj.get_initialize_inputs())) return tools def _parse_tool_from_function(f, initialize_inputs=None, gen_custom_type_conn=False, skip_prompt_template=False): try: tool_type = getattr(f, "__type", None) or ToolType.PYTHON except Exception as e: raise e tool_name = getattr(f, "__name", None) description = getattr(f, "__description", None) if hasattr(f, "__tool") and isinstance(f.__tool, Tool): return f.__tool if hasattr(f, "__original_function"): f = f.__original_function try: inputs, _, _, enable_kwargs = function_to_interface( f, initialize_inputs=initialize_inputs, gen_custom_type_conn=gen_custom_type_conn, skip_prompt_template=skip_prompt_template, ) except Exception as e: error_type_and_message = f"({e.__class__.__name__}) {e}" raise BadFunctionInterface( message_format="Parse interface for tool '{tool_name}' failed: {error_type_and_message}", tool_name=f.__name__, error_type_and_message=error_type_and_message, ) from e class_name = None if "." in f.__qualname__: class_name = f.__qualname__.replace(f".{f.__name__}", "") # Construct the Tool structure return Tool( name=tool_name or f.__qualname__, description=description or inspect.getdoc(f), inputs=inputs, type=tool_type, class_name=class_name, function=f.__name__, module=f.__module__, enable_kwargs=enable_kwargs, ) def generate_python_tools_in_module(module): tool_functions = collect_tool_functions_in_module(module) tool_methods = collect_tool_methods_in_module(module) return [_parse_tool_from_function(f) for f in tool_functions + tool_methods] def generate_python_tools_in_module_as_dict(module): tools = generate_python_tools_in_module(module) return {f"{t.module}.{t.name}": asdict_without_none(t) for t in tools} def load_python_module_from_file(src_file: Path): # Here we hard code the module name as __pf_main__ since it is invoked as a main script in pf. src_file = Path(src_file).resolve() # Make sure the path is absolute to align with python import behavior. spec = importlib.util.spec_from_file_location("__pf_main__", location=src_file) if spec is None or spec.loader is None: raise PythonLoaderNotFound( message_format="Failed to load python file '{src_file}'. Please make sure it is a valid .py file.", src_file=src_file, ) m = importlib.util.module_from_spec(spec) try: spec.loader.exec_module(m) except Exception as e: # TODO: add stacktrace to additional info error_type_and_message = f"({e.__class__.__name__}) {e}" raise PythonLoadError( message_format="Failed to load python module from file '{src_file}': {error_type_and_message}", src_file=src_file, error_type_and_message=error_type_and_message, ) from e return m def load_python_module(content, source=None): # Source represents code first experience. 
if source is not None and Path(source).exists(): return load_python_module_from_file(Path(source)) try: m = types.ModuleType(PF_MAIN_MODULE_NAME) exec(content, m.__dict__) return m except Exception as e: error_type_and_message = f"({e.__class__.__name__}) {e}" raise PythonParsingError( message_format="Failed to load python module. Python parsing failed: {error_type_and_message}", error_type_and_message=error_type_and_message, ) from e def collect_tool_function_in_module(m): tool_functions = collect_tool_functions_in_module(m) tool_methods = collect_tool_methods_with_init_inputs_in_module(m) num_tools = len(tool_functions) + len(tool_methods) if num_tools == 0: raise NoToolDefined( message_format=( "No tool found in the python script. " "Please make sure you have one and only one tool definition in your script." ) ) elif num_tools > 1: tool_names = ", ".join(t.__name__ for t in tool_functions + tool_methods) raise MultipleToolsDefined( message_format=( "Expected 1 but collected {tool_count} tools: {tool_names}. " "Please make sure you have one and only one tool definition in your script." ), tool_count=num_tools, tool_names=tool_names, ) if tool_functions: return tool_functions[0], None else: return tool_methods[0] def generate_python_tool(name, content, source=None): m = load_python_module(content, source) f, initialize_inputs = collect_tool_function_in_module(m) tool = _parse_tool_from_function(f, initialize_inputs=initialize_inputs) tool.module = None if name is not None: tool.name = name if source is None: tool.code = content else: tool.source = source return tool def generate_python_meta_dict(name, content, source=None): return asdict_without_none(generate_python_tool(name, content, source)) # Only used in non-code first experience. def generate_python_meta(name, content, source=None): return json.dumps(generate_python_meta_dict(name, content, source), indent=2) def generate_prompt_meta(name, content, prompt_only=False, source=None): return json.dumps(generate_prompt_meta_dict(name, content, prompt_only, source), indent=2) def generate_tool_meta_dict_by_file(path: str, tool_type: ToolType): """Generate meta for a single tool file, which can be a python file or a jinja template file, note that if a python file is passed, correct working directory must be set and should be added to sys.path. """ tool_type = ToolType(tool_type) file = Path(path) if not file.is_file(): raise MetaFileNotFound( message_format="Generate tool meta failed for {tool_type} tool. Meta file '{file_path}' can not be found.", tool_type=tool_type.value, file_path=path, # Use a relative path here to make the error message more readable. ) try: content = file.read_text(encoding="utf-8") except Exception as e: error_type_and_message = f"({e.__class__.__name__}) {e}" raise MetaFileReadError( message_format=( "Generate tool meta failed for {tool_type} tool. " "Read meta file '{file_path}' failed: {error_type_and_message}" ), tool_type=tool_type.value, file_path=path, error_type_and_message=error_type_and_message, ) from e name = file.stem if tool_type == ToolType.PYTHON: return generate_python_meta_dict(name, content, path) elif tool_type == ToolType.LLM: return generate_prompt_meta_dict(name, content, source=path) elif tool_type == ToolType.PROMPT: return generate_prompt_meta_dict(name, content, prompt_only=True, source=path) else: raise NotSupported( message_format=( "Generate tool meta failed. " "The type '{tool_type}' is currently unsupported. 
" "Please choose from available types: {supported_tool_types} and try again." ), tool_type=tool_type.value, supported_tool_types=",".join([ToolType.PYTHON, ToolType.LLM, ToolType.PROMPT]), ) class ToolValidationError(UserErrorException): """Base exception raised when failed to validate tool.""" def __init__(self, **kwargs): super().__init__(target=ErrorTarget.TOOL, **kwargs) class JinjaParsingError(ToolValidationError): pass class ReservedVariableCannotBeUsed(JinjaParsingError): pass class PythonParsingError(ToolValidationError): pass class PythonLoaderNotFound(ToolValidationError): pass class NoToolDefined(PythonParsingError): pass class MultipleToolsDefined(PythonParsingError): pass class BadFunctionInterface(PythonParsingError): pass class PythonLoadError(PythonParsingError): @property def python_load_traceback(self): """Return the traceback inside user's source code scope. The traceback inside the promptflow's internal code will be taken off. """ exc = self.inner_exception if exc and exc.__traceback__ is not None: tb = exc.__traceback__ # The first three frames are always the code in tool.py who invokes the tool. # We do not want to dump it to user code's traceback. tb = get_tb_next(tb, next_cnt=3) if tb is not None: te = TracebackException(type(exc), exc, tb) formatted_tb = "".join(te.format()) return formatted_tb return None @property def additional_info(self): """Set the python load exception details as additional info.""" if not self.inner_exception: return None info = { "type": self.inner_exception.__class__.__name__, "message": str(self.inner_exception), "traceback": self.python_load_traceback, } info.update(last_frame_info(self.inner_exception)) return { ADDITIONAL_INFO_USER_CODE_STACKTRACE: info, }
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_core/token_provider.py
import threading
from abc import ABC, abstractmethod

from promptflow.exceptions import UserErrorException

# to access azure ai services, we need to get the token with this audience
COGNITIVE_AUDIENCE = "https://cognitiveservices.azure.com/"


class TokenProviderABC(ABC):
    def __init__(self) -> None:
        super().__init__()

    @abstractmethod
    def get_token(self) -> str:
        pass


class StaticTokenProvider(TokenProviderABC):
    def __init__(self, token: str) -> None:
        super().__init__()
        self.token = token

    def get_token(self) -> str:
        return self.token


class AzureTokenProvider(TokenProviderABC):
    _instance_lock = threading.Lock()
    _instance = None

    def __new__(cls, *args, **kwargs):
        with cls._instance_lock:
            if not cls._instance:
                cls._instance = super().__new__(cls)
                cls._instance._init_instance()
            return cls._instance

    def _init_instance(self):
        try:
            # Initialize a credential instance
            from azure.identity import DefaultAzureCredential

            self.credential = DefaultAzureCredential()
        except ImportError as ex:
            raise UserErrorException(
                "Failed to initialize AzureTokenProvider. "
                + f"Please try 'pip install azure.identity' to install dependency, {ex.msg}."
            )

    def get_token(self):
        audience = COGNITIVE_AUDIENCE
        return self.credential.get_token(audience).token
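# --- Illustrative usage sketch (not part of the original source) ------------
# StaticTokenProvider is handy for tests; AzureTokenProvider is a process-wide
# singleton backed by DefaultAzureCredential and requires the azure-identity
# package plus a working credential chain.
from promptflow._core.token_provider import AzureTokenProvider, StaticTokenProvider

static = StaticTokenProvider("fake-token")
assert static.get_token() == "fake-token"

# provider = AzureTokenProvider()   # uncomment when running with Azure credentials
# aad_token = provider.get_token()  # token scoped to the cognitive services audience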
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/__init__.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------

__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore

from ._pf_client import PFClient

__all__ = ["PFClient"]
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_pf_client.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import os from os import PathLike from typing import Dict, List, Optional, Union from azure.ai.ml import MLClient from azure.core.credentials import TokenCredential from promptflow._sdk._constants import MAX_SHOW_DETAILS_RESULTS from promptflow._sdk._errors import RunOperationParameterError from promptflow._sdk._user_agent import USER_AGENT from promptflow._sdk._utils import ClientUserAgentUtil, setup_user_agent_to_operation_context from promptflow._sdk.entities import Run from promptflow.azure._restclient.service_caller_factory import _FlowServiceCallerFactory from promptflow.azure.operations import RunOperations from promptflow.azure.operations._arm_connection_operations import ArmConnectionOperations from promptflow.azure.operations._connection_operations import ConnectionOperations from promptflow.azure.operations._flow_operations import FlowOperations from promptflow.exceptions import UserErrorException class PFClient: """A client class to interact with Promptflow service. Use this client to manage promptflow resources, e.g. runs. :param credential: Credential to use for authentication, optional :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: Azure subscription ID, optional for registry assets only, optional :type subscription_id: typing.Optional[str] :param resource_group_name: Azure resource group, optional for registry assets only, optional :type resource_group_name: typing.Optional[str] :param workspace_name: Workspace to use in the client, optional for non workspace dependent operations only, optional. :type workspace_name: typing.Optional[str] :param kwargs: A dictionary of additional configuration parameters. 
:type kwargs: dict """ def __init__( self, credential: TokenCredential = None, subscription_id: Optional[str] = None, resource_group_name: Optional[str] = None, workspace_name: Optional[str] = None, **kwargs, ): self._validate_config_information(subscription_id, resource_group_name, workspace_name, kwargs) # add user agent from kwargs if any if isinstance(kwargs.get("user_agent", None), str): ClientUserAgentUtil.append_user_agent(kwargs["user_agent"]) # append SDK ua to context user_agent = setup_user_agent_to_operation_context(USER_AGENT) kwargs.setdefault("user_agent", user_agent) self._ml_client = kwargs.pop("ml_client", None) or MLClient( credential=credential, subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, **kwargs, ) try: workspace = self._ml_client.workspaces.get(name=self._ml_client._operation_scope.workspace_name) except Exception as e: raise UserErrorException(message=str(e), error=e) self._service_caller = _FlowServiceCallerFactory.get_instance( workspace=workspace, credential=self._ml_client._credential, operation_scope=self._ml_client._operation_scope, **kwargs, ) self._flows = FlowOperations( operation_scope=self._ml_client._operation_scope, operation_config=self._ml_client._operation_config, all_operations=self._ml_client._operation_container, credential=self._ml_client._credential, service_caller=self._service_caller, workspace=workspace, **kwargs, ) self._runs = RunOperations( operation_scope=self._ml_client._operation_scope, operation_config=self._ml_client._operation_config, all_operations=self._ml_client._operation_container, credential=self._ml_client._credential, flow_operations=self._flows, service_caller=self._service_caller, workspace=workspace, **kwargs, ) self._connections = ConnectionOperations( operation_scope=self._ml_client._operation_scope, operation_config=self._ml_client._operation_config, all_operations=self._ml_client._operation_container, credential=self._ml_client._credential, service_caller=self._service_caller, **kwargs, ) self._arm_connections = ArmConnectionOperations( operation_scope=self._ml_client._operation_scope, operation_config=self._ml_client._operation_config, all_operations=self._ml_client._operation_container, credential=self._ml_client._credential, service_caller=self._service_caller, **kwargs, ) @staticmethod def _validate_config_information(subscription_id, resource_group_name, workspace_name, kwargs): """Validate the config information in case wrong parameter name is passed into the constructor.""" sub_name, wrong_sub_name = "subscription_id", "subscription" rg_name, wrong_rg_name = "resource_group_name", "resource_group" ws_name, wrong_ws_name = "workspace_name", "workspace" error_message = ( "You have passed in the wrong parameter name to initialize the PFClient, please use {0!r} instead of {1!r}." 
) if not subscription_id and kwargs.get(wrong_sub_name, None) is not None: raise RunOperationParameterError(error_message.format(sub_name, wrong_sub_name)) if not resource_group_name and kwargs.get(wrong_rg_name, None) is not None: raise RunOperationParameterError(error_message.format(rg_name, wrong_rg_name)) if not workspace_name and kwargs.get(wrong_ws_name, None) is not None: raise RunOperationParameterError(error_message.format(ws_name, wrong_ws_name)) @property def ml_client(self): """Return a client to interact with Azure ML services.""" return self._ml_client @property def runs(self): """Return the run operation object that can manage runs.""" return self._runs @property def flows(self): """Return the flow operation object that can manage flows.""" return self._flows @classmethod def from_config( cls, credential: TokenCredential, *, path: Optional[Union[os.PathLike, str]] = None, file_name=None, **kwargs, ) -> "PFClient": """Return a PFClient object connected to Azure Machine Learning workspace. Reads workspace configuration from a file. Throws an exception if the config file can't be found. The method provides a simple way to reuse the same workspace across multiple Python notebooks or projects. Users can save the workspace Azure Resource Manager (ARM) properties using the [workspace.write_config](https://aka.ms/ml-workspace-class) method, and use this method to load the same workspace in different Python notebooks or projects without retyping the workspace ARM properties. :param credential: The credential object for the workspace. :type credential: ~azure.core.credentials.TokenCredential :param path: The path to the config file or starting directory to search. The parameter defaults to starting the search in the current directory. optional :type path: typing.Union[os.PathLike, str] :param file_name: Allows overriding the config file name to search for when path is a directory path. (Default value = None) :type file_name: str """ ml_client = MLClient.from_config(credential=credential, path=path, file_name=file_name, **kwargs) return PFClient( ml_client=ml_client, **kwargs, ) def run( self, flow: Union[str, PathLike], *, data: Union[str, PathLike] = None, run: Union[str, Run] = None, column_mapping: dict = None, variant: str = None, connections: dict = None, environment_variables: dict = None, name: str = None, display_name: str = None, tags: Dict[str, str] = None, **kwargs, ) -> Run: """Run flow against provided data or run. .. note:: at least one of data or run must be provided. .. admonition:: Data can be local file or remote path. - Example: - `data = "path/to/local/file"` - `data = "azureml:data_name:data_version"` - `data = "azureml://datastores/datastore_name/path/to/file"` - `data = "https://example.com/data.jsonl"` Column mapping is a mapping from flow input name to specified values. If specified, the flow will be executed with provided value for specified inputs. 
The value can be: - from data: - ``data.col1`` - from run: - ``run.inputs.col1``: if need reference run's inputs - ``run.output.col1``: if need reference run's outputs - Example: - ``{"ground_truth": "${data.answer}", "prediction": "${run.outputs.answer}"}`` :param flow: path to flow directory to run evaluation :type flow: Union[str, PathLike] :param data: pointer to test data (of variant bulk runs) for eval runs :type data: Union[str, PathLike] :param run: flow run id or flow run, keep lineage between current run and variant runs, batch outputs can be referenced as ${run.outputs.col_name} in inputs_mapping :type run: Union[str, ~promptflow.entities.Run] :param column_mapping: define a data flow logic to map input data. :type column_mapping: dict :param variant: Node & variant name in format of ${node_name.variant_name}, will use default variant if not specified. :type variant: str :param connections: Overwrite node level connections with provided value. Example: ``{"node1": {"connection": "new_connection", "deployment_name": "gpt-35-turbo"}}`` :type connections: dict :param environment_variables: Environment variables to set by specifying a property path and value. Example: ``{"key1": "${my_connection.api_key}", "key2"="value2"}`` The value reference to connection keys will be resolved to the actual value, and all environment variables specified will be set into os.environ. :type environment_variables: dict :param name: Name of the run. :type name: str :param display_name: Display name of the run. :type display_name: str :param tags: Tags of the run. :type tags: Dict[str, str] :return: flow run info. :rtype: ~promptflow.entities.Run """ # TODO(2887134): support cloud eager Run CRUD run = Run( name=name, display_name=display_name, tags=tags, data=data, column_mapping=column_mapping, run=run, variant=variant, flow=flow, connections=connections, environment_variables=environment_variables, ) return self.runs.create_or_update(run=run, **kwargs) def stream(self, run: Union[str, Run], raise_on_error: bool = True) -> Run: """Stream run logs to the console. :param run: Run object or name of the run. :type run: Union[str, ~promptflow.sdk.entities.Run] :param raise_on_error: Raises an exception if a run fails or canceled. :type raise_on_error: bool :return: flow run info. """ if isinstance(run, Run): run = run.name return self.runs.stream(run, raise_on_error) def get_details( self, run: Union[str, Run], max_results: int = MAX_SHOW_DETAILS_RESULTS, all_results: bool = False ) -> "DataFrame": """Get the details from the run including inputs and outputs. .. note:: If `all_results` is set to True, `max_results` will be overwritten to sys.maxsize. :param run: The run name or run object :type run: Union[str, ~promptflow.sdk.entities.Run] :param max_results: The max number of runs to return, defaults to 100 :type max_results: int :param all_results: Whether to return all results, defaults to False :type all_results: bool :raises RunOperationParameterError: If `max_results` is not a positive integer. :return: The details data frame. :rtype: pandas.DataFrame """ return self.runs.get_details(run=run, max_results=max_results, all_results=all_results) def get_metrics(self, run: Union[str, Run]) -> dict: """Print run metrics to the console. :param run: Run object or name of the run. 
:type run: Union[str, ~promptflow.sdk.entities.Run] :return: The run's metrics :rtype: dict """ if isinstance(run, Run): run = run.name return self.runs.get_metrics(run=run) def visualize(self, runs: Union[List[str], List[Run]]) -> None: """Visualize run(s). :param run: Run object or name of the run. :type run: Union[str, ~promptflow.sdk.entities.Run] """ self.runs.visualize(runs)
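# --- Illustrative usage sketch (not part of the original source) ------------
# Submits a flow run against a workspace; every identifier below is a
# placeholder, not a real resource.
from azure.identity import DefaultAzureCredential

from promptflow.azure import PFClient

pf = PFClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
    resource_group_name="<resource-group>",
    workspace_name="<workspace-name>",
)
run = pf.run(
    flow="flows/web_classification",        # local flow folder
    data="data/test.jsonl",                 # local or remote data
    column_mapping={"url": "${data.url}"},  # map data columns to flow inputs
)
pf.stream(run)
print(pf.get_metrics(run))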
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_load_functions.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from os import PathLike
from pathlib import Path
from typing import IO, AnyStr, Optional, Union

from ._utils import is_arm_id


def load_flow(
    source: Union[str, PathLike, IO[AnyStr]],
    *,
    relative_origin: Optional[str] = None,
    **kwargs,
):
    """Construct a flow object from a yaml file.

    :param source: The local yaml source of a flow. Must be either a path to a local file,
        or an already-open file. If the source is a path, it will be opened and read.
        An exception is raised if the file does not exist. If the source is an open file,
        the file will be read directly, and an exception is raised if the file is not readable.
    :type source: Union[PathLike, str, io.TextIOWrapper]
    :param relative_origin: The origin to be used when deducing the relative locations of
        files referenced in the parsed yaml. Defaults to the inputted source's directory if
        it is a file or file path input. Defaults to "./" if the source is a stream input
        with no name value.
    :type relative_origin: str
    :param params_override: Fields to overwrite on top of the yaml file.
        Format is [{"field1": "value1"}, {"field2": "value2"}]
    :type params_override: List[Dict]
    :return: Loaded flow object.
    :rtype: promptflow.azure.Flow
    """
    from promptflow.azure._entities._flow import Flow

    if is_arm_id(source):
        return source
    return Flow(path=Path(source))
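# --- Illustrative usage sketch (not part of the original source) ------------
# load_flow wraps a local flow path into a Flow entity (an ARM id is passed
# through untouched); the path below is hypothetical.
from promptflow.azure._load_functions import load_flow

flow = load_flow(source="flows/web_classification")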
0
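A small, hedged sketch of calling the load_flow helper above; the flow directory path is a hypothetical placeholder and the import path simply mirrors the file location shown.

from promptflow.azure._load_functions import load_flow

flow = load_flow(source="./my_flow")   # wraps the local flow directory in an azure Flow entity
# an ARM id string passed as source would be returned unchanged, per the is_arm_id() check above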
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/resources/component_spec_template.yaml
$schema: https://azuremlschemas.azureedge.net/latest/commandComponent.schema.json # will be changed to flow to support parallelism type: command outputs: output: # PRS team will always aggregate all the outputs into a single file under this folder for now type: uri_folder
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/_arm_connection_operations.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- from enum import Enum from typing import Any, Dict, Union import requests from azure.ai.ml._restclient.v2023_06_01_preview.models import WorkspaceConnectionPropertiesV2BasicResource from azure.ai.ml._scope_dependent_operations import ( OperationConfig, OperationsContainer, OperationScope, _ScopeDependentOperations, ) from azure.core.exceptions import ClientAuthenticationError from promptflow._sdk.entities._connection import CustomConnection, _Connection from promptflow._utils.retry_utils import http_retry_wrapper from promptflow.azure._restclient.flow_service_caller import FlowServiceCaller from promptflow.azure._utils.gerneral import get_arm_token from promptflow.exceptions import ErrorTarget, SystemErrorException, UserErrorException GET_CONNECTION_URL = ( "/subscriptions/{sub}/resourcegroups/{rg}/providers/Microsoft.MachineLearningServices" "/workspaces/{ws}/connections/{name}/listsecrets?api-version=2023-04-01-preview" ) LIST_CONNECTION_URL = ( "/subscriptions/{sub}/resourcegroups/{rg}/providers/Microsoft.MachineLearningServices" "/workspaces/{ws}/connections?api-version=2023-04-01-preview" ) FLOW_META_PREFIX = "azureml.flow." class ConnectionCategory(str, Enum): AzureOpenAI = "AzureOpenAI" CognitiveSearch = "CognitiveSearch" CognitiveService = "CognitiveService" CustomKeys = "CustomKeys" def get_case_insensitive_key(d, key, default=None): for k, v in d.items(): if k.lower() == key.lower(): return v return default class ArmConnectionOperations(_ScopeDependentOperations): """ArmConnectionOperations. Get connections from arm api. You should not instantiate this class directly. Instead, you should create an PFClient instance that instantiates it for you and attaches it as an attribute. """ def __init__( self, operation_scope: OperationScope, operation_config: OperationConfig, all_operations: OperationsContainer, credential, service_caller: FlowServiceCaller, **kwargs: Dict, ): super(ArmConnectionOperations, self).__init__(operation_scope, operation_config) self._all_operations = all_operations self._service_caller = service_caller self._credential = credential def get(self, name, **kwargs): connection_dict = self.build_connection_dict(name) return _Connection._from_execution_connection_dict(name=name, data=connection_dict) @classmethod def _direct_get(cls, name, subscription_id, resource_group_name, workspace_name, credential): """ This method is added for local pf_client with workspace provider to ensure we only require limited permission(workspace/list secrets). As create azure pf_client requires workspace read permission. """ connection_dict = cls._build_connection_dict( name, subscription_id, resource_group_name, workspace_name, credential ) return _Connection._from_execution_connection_dict(name=name, data=connection_dict) @classmethod def open_url(cls, token, url, action, host="management.azure.com", method="GET", model=None) -> Union[Any, dict]: """ :type token: str :type url: str :type action: str, for the error message format. 
:type host: str :type method: str :type model: Type[msrest.serialization.Model] """ headers = {"Authorization": f"Bearer {token}"} response = http_retry_wrapper(requests.request)(method, f"https://{host}{url}", headers=headers) message_format = ( f"Open url {{url}} failed with status code: {response.status_code}, action: {action}, reason: {{reason}}" ) if response.status_code == 403: raise AccessDeniedError(operation=url, target=ErrorTarget.RUNTIME) elif 400 <= response.status_code < 500: raise OpenURLFailedUserError( message_format=message_format, url=url, reason=response.reason, ) elif response.status_code != 200: raise OpenURLFailed( message_format=message_format, url=url, reason=response.reason, ) data = response.json() if model: return model.deserialize(data) return data @classmethod def validate_and_fallback_connection_type(cls, name, type_name, category, metadata): if type_name: return type_name if category == ConnectionCategory.AzureOpenAI: return "AzureOpenAI" if category == ConnectionCategory.CognitiveSearch: return "CognitiveSearch" if category == ConnectionCategory.CognitiveService: kind = get_case_insensitive_key(metadata, "Kind") if kind == "Content Safety": return "AzureContentSafety" if kind == "Form Recognizer": return "FormRecognizer" raise UnknownConnectionType( message_format="Connection {name} is not recognized in PromptFlow, " "please make sure the connection is created in PromptFlow.", category=category, name=name, ) @classmethod def build_connection_dict_from_rest_object(cls, name, obj) -> dict: """ :type name: str :type obj: azure.ai.ml._restclient.v2023_06_01_preview.models.WorkspaceConnectionPropertiesV2BasicResource """ # Reference 1: https://msdata.visualstudio.com/Vienna/_git/vienna?path=/src/azureml-api/src/AccountRP/Contracts/WorkspaceConnection/WorkspaceConnectionDtoV2.cs&_a=blame&version=GBmaster # noqa: E501 # Reference 2: https://msdata.visualstudio.com/Vienna/_git/vienna?path=%2Fsrc%2Fazureml-api%2Fsrc%2FDesigner%2Fsrc%2FMiddleTier%2FMiddleTier%2FServices%2FPromptFlow%2FConnectionsManagement.cs&version=GBmaster&_a=contents # noqa: E501 # This connection type covers the generic ApiKey auth connection categories, for examples: # AzureOpenAI: # Category:= AzureOpenAI # AuthType:= ApiKey (as type discriminator) # Credentials:= {ApiKey} as <see cref="ApiKey"/> # Target:= {ApiBase} # # CognitiveService: # Category:= CognitiveService # AuthType:= ApiKey (as type discriminator) # Credentials:= {SubscriptionKey} as <see cref="ApiKey"/> # Target:= ServiceRegion={serviceRegion} # # CognitiveSearch: # Category:= CognitiveSearch # AuthType:= ApiKey (as type discriminator) # Credentials:= {Key} as <see cref="ApiKey"/> # Target:= {Endpoint} # # Use Metadata property bag for ApiType, ApiVersion, Kind and other metadata fields properties = obj.properties type_name = get_case_insensitive_key(properties.metadata, f"{FLOW_META_PREFIX}connection_type") type_name = cls.validate_and_fallback_connection_type(name, type_name, properties.category, properties.metadata) module = get_case_insensitive_key(properties.metadata, f"{FLOW_META_PREFIX}module", "promptflow.connections") # Note: Category is connectionType in MT, but type name should be class name, which is flowValueType in MT. 
# Handle old connections here, see details: https://github.com/Azure/promptflow/tree/main/connections type_name = f"{type_name}Connection" if not type_name.endswith("Connection") else type_name meta = {"type": type_name, "module": module} if properties.category == ConnectionCategory.AzureOpenAI: value = { "api_key": properties.credentials.key, "api_base": properties.target, "api_type": get_case_insensitive_key(properties.metadata, "ApiType"), "api_version": get_case_insensitive_key(properties.metadata, "ApiVersion"), } # Note: Resource id is required in some cloud scenario, which is not exposed on sdk/cli entity. resource_id = get_case_insensitive_key(properties.metadata, "ResourceId") if resource_id: value["resource_id"] = resource_id elif properties.category == ConnectionCategory.CognitiveSearch: value = { "api_key": properties.credentials.key, "api_base": properties.target, "api_version": get_case_insensitive_key(properties.metadata, "ApiVersion"), } elif properties.category == ConnectionCategory.CognitiveService: value = { "api_key": properties.credentials.key, "endpoint": properties.target, "api_version": get_case_insensitive_key(properties.metadata, "ApiVersion"), } elif properties.category == ConnectionCategory.CustomKeys: # Merge secrets from credentials.keys and other string fields from metadata value = { **properties.credentials.keys, **{k: v for k, v in properties.metadata.items() if not k.startswith(FLOW_META_PREFIX)}, } if type_name == CustomConnection.__name__: meta["secret_keys"] = list(properties.credentials.keys.keys()) else: raise UnknownConnectionType( message_format=( "Unknown connection {name} category {category}, " "please upgrade your promptflow sdk version and retry." ), category=properties.category, name=name, ) # Note: Filter empty values out to ensure default values can be picked when init class object. return {**meta, "value": {k: v for k, v in value.items() if v}} def build_connection_dict(self, name): return self._build_connection_dict( name, self._operation_scope.subscription_id, self._operation_scope.resource_group_name, self._operation_scope.workspace_name, self._credential, ) @classmethod def _convert_to_connection_dict(cls, conn_name, conn_data): try: rest_obj = WorkspaceConnectionPropertiesV2BasicResource.deserialize(conn_data) conn_dict = cls.build_connection_dict_from_rest_object(conn_name, rest_obj) return conn_dict except Exception as e: raise BuildConnectionError( message_format=f"Build connection dict for connection {{name}} failed with {e}.", name=conn_name, ) @classmethod def _build_connection_dict(cls, name, subscription_id, resource_group_name, workspace_name, credential) -> dict: """ :type name: str :type subscription_id: str :type resource_group_name: str :type workspace_name: str :type credential: azure.identity.TokenCredential """ url = GET_CONNECTION_URL.format( sub=subscription_id, rg=resource_group_name, ws=workspace_name, name=name, ) try: rest_obj: WorkspaceConnectionPropertiesV2BasicResource = cls.open_url( get_arm_token(credential=credential), url=url, action="listsecrets", method="POST", model=WorkspaceConnectionPropertiesV2BasicResource, ) except AccessDeniedError: auth_error_message = ( "Access denied to list workspace secret due to invalid authentication. " "Please ensure you have gain RBAC role 'Azure Machine Learning Workspace Connection Secrets Reader' " "for current workspace, and wait for a few minutes to make sure the new role takes effect. 
" ) raise OpenURLUserAuthenticationError(message=auth_error_message) except ClientAuthenticationError as e: raise UserErrorException(target=ErrorTarget.CONTROL_PLANE_SDK, message=str(e), error=e) except Exception as e: raise SystemErrorException(target=ErrorTarget.CONTROL_PLANE_SDK, message=str(e), error=e) try: return cls.build_connection_dict_from_rest_object(name, rest_obj) except Exception as e: raise BuildConnectionError( message_format=f"Build connection dict for connection {{name}} failed with {e}.", name=name, ) class AccessDeniedError(UserErrorException): """Exception raised when run info can not be found in storage""" def __init__(self, operation: str, target: ErrorTarget): super().__init__(message=f"Access is denied to perform operation {operation!r}", target=target) class OpenURLFailed(SystemErrorException): def __init__(self, **kwargs): super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, **kwargs) class BuildConnectionError(SystemErrorException): def __init__(self, **kwargs): super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, **kwargs) class UserAuthenticationError(UserErrorException): """Exception raised when user authentication failed""" pass class OpenURLUserAuthenticationError(UserAuthenticationError): def __init__(self, **kwargs): super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, **kwargs) class OpenURLFailedUserError(UserErrorException): def __init__(self, **kwargs): super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, **kwargs) class UnknownConnectionType(UserErrorException): def __init__(self, **kwargs): super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, **kwargs)
0
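A tiny, self-contained illustration of the get_case_insensitive_key helper defined in the file above, which tolerates the arbitrary key casing of workspace connection metadata; the metadata values are made up.

from promptflow.azure.operations._arm_connection_operations import get_case_insensitive_key

metadata = {"ApiVersion": "2023-07-01-preview", "apitype": "azure"}   # hypothetical metadata bag

assert get_case_insensitive_key(metadata, "apiversion") == "2023-07-01-preview"
assert get_case_insensitive_key(metadata, "APITYPE") == "azure"
assert get_case_insensitive_key(metadata, "Kind", default="unknown") == "unknown"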
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/__init__.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore from ._flow_operations import FlowOperations from ._run_operations import RunOperations __all__ = ["FlowOperations", "RunOperations"]
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/_async_run_downloader.py
import asyncio import contextvars import functools import json from pathlib import Path from typing import Optional, Union import httpx from azure.core.exceptions import HttpResponseError from azure.storage.blob.aio import BlobServiceClient from promptflow._sdk._constants import DEFAULT_ENCODING, DownloadedRun from promptflow._sdk._errors import DownloadInternalError, RunNotFoundError, RunOperationError from promptflow._sdk.entities import Run from promptflow._utils.logger_utils import get_cli_sdk_logger from promptflow.exceptions import UserErrorException logger = get_cli_sdk_logger() class AsyncRunDownloader: """Download run results from the service asynchronously. :param run: The run id. :type run: str :param run_ops: The run operations. :type run_ops: ~promptflow.azure.operations.RunOperations :param output_folder: The output folder to save the run results. :type output_folder: Union[Path, str] """ IGNORED_PATTERN = ["__pycache__"] def __init__(self, run: str, run_ops: "RunOperations", output_folder: Union[str, Path]) -> None: self.run = run self.run_ops = run_ops self.datastore = run_ops._workspace_default_datastore self.output_folder = Path(output_folder) self.blob_service_client = self._init_blob_service_client() self._use_flow_outputs = False # old runtime does not write debug_info output asset, use flow_outputs instead def _init_blob_service_client(self): logger.debug("Initializing blob service client.") account_url = f"{self.datastore.account_name}.blob.{self.datastore.endpoint}" return BlobServiceClient(account_url=account_url, credential=self.run_ops._credential) async def download(self) -> str: """Download the run results asynchronously.""" error_msg_prefix = f"Failed to download run {self.run!r}" try: # pass verify=False to client to disable SSL verification. # Source: https://github.com/encode/httpx/issues/1331 async with httpx.AsyncClient(verify=False) as client: tasks = [ # put async functions in tasks to run in coroutines self._download_artifacts_and_snapshot(client), # below functions are actually synchronous functions in order to reuse code # and use thread pool to avoid blocking the event loop to_thread(self._download_run_metrics), to_thread(self._download_run_logs), ] await asyncio.gather(*tasks) except RunNotFoundError as e: raise RunOperationError(f"{error_msg_prefix}. Error: {e}") from e except HttpResponseError as e: if e.status_code == 403: raise RunOperationError( f"{error_msg_prefix}. User does not have permission to perform this operation on storage account " f"{self.datastore.account_name!r} container {self.datastore.container_name!r}. " f"Original azure blob error: {str(e)}" ) else: raise DownloadInternalError(f"{error_msg_prefix}. Error: {e}") from e except Exception as e: raise DownloadInternalError(f"{error_msg_prefix}. 
Error: {e}") from e return self.output_folder.resolve().as_posix() async def _download_artifacts_and_snapshot(self, httpx_client: httpx.AsyncClient): run_data = await self._get_run_data_from_run_history(httpx_client) logger.debug("Parsing run data from run history to get necessary information.") # extract necessary information from run data snapshot_id = run_data["runMetadata"]["properties"]["azureml.promptflow.snapshot_id"] output_data = run_data["runMetadata"]["outputs"].get("debug_info", None) if output_data is None: logger.warning( "Downloading run '%s' but the 'debug_info' output assets is not available, " "maybe because the job ran on old version runtime, trying to get `flow_outputs` output asset instead.", self.run, ) self._use_flow_outputs = True output_data = run_data["runMetadata"]["outputs"].get("flow_outputs", None) output_asset_id = output_data["assetId"] # save run metadata to run_metadata.json logger.debug("Saving the run meta data.") run_data = self.run_ops._refine_run_data_from_run_history(run_data) run_data = Run._from_run_history_entity(run_data) with open(self.output_folder / DownloadedRun.RUN_METADATA_FILE_NAME, "w", encoding=DEFAULT_ENCODING) as f: json.dump(run_data._to_dict(), f, ensure_ascii=False) async with self.blob_service_client: container_name = self.datastore.container_name logger.debug("Getting container client (%s) from workspace default datastore.", container_name) container_client = self.blob_service_client.get_container_client(container_name) async with container_client: tasks = [ self._download_flow_artifacts(httpx_client, container_client, output_asset_id), self._download_snapshot(httpx_client, container_client, snapshot_id), ] await asyncio.gather(*tasks) async def _get_run_data_from_run_history(self, client: httpx.AsyncClient): """Get the run data from the run history.""" logger.debug("Getting run data from run history.") headers = self.run_ops._get_headers() url = self.run_ops._run_history_endpoint_url + "/rundata" payload = { "runId": self.run, "selectRunMetadata": True, "selectRunDefinition": True, "selectJobSpecification": True, } error_msg_prefix = "Failed to get run data from run history" try: response = await client.post(url, headers=headers, json=payload) except Exception as e: raise DownloadInternalError(f"{error_msg_prefix}. Error: {e}") from e else: if response.status_code == 200: return response.json() elif response.status_code == 404: raise RunNotFoundError(f"{error_msg_prefix}. Run {self.run!r} not found.") else: raise DownloadInternalError( f"{error_msg_prefix}. Code: {response.status_code}. 
Reason: {response.reason_phrase}" ) def _download_run_metrics( self, ): """Download the run metrics.""" logger.debug("Downloading run metrics.") metrics = self.run_ops.get_metrics(self.run) with open(self.output_folder / DownloadedRun.METRICS_FILE_NAME, "w", encoding=DEFAULT_ENCODING) as f: json.dump(metrics, f, ensure_ascii=False) logger.debug("Downloaded run metrics.") async def _download_flow_artifacts(self, httpx_client: httpx.AsyncClient, container_client, output_data): """Download the output data.""" asset_path = await self._get_asset_path(httpx_client, output_data) await self._download_blob_folder_from_asset_path(container_client, asset_path) async def _download_blob_folder_from_asset_path( self, container_client, asset_path: str, local_folder: Optional[Path] = None ): """Download the blob data from the data path.""" logger.debug("Downloading all blobs from data path prefix '%s'", asset_path) if local_folder is None: local_folder = self.output_folder tasks = [] async for blob in container_client.list_blobs(name_starts_with=asset_path): blob_client = container_client.get_blob_client(blob.name) relative_path = Path(blob.name).relative_to(asset_path) local_path = local_folder / relative_path tasks.append(self._download_single_blob(blob_client, local_path)) await asyncio.gather(*tasks) async def _download_single_blob(self, blob_client, local_path: Optional[Path] = None): """Download a single blob.""" if local_path is None: local_path = Path(self.output_folder / blob_client.blob_name) elif local_path.exists(): raise UserErrorException(f"Local file {local_path.resolve().as_posix()!r} already exists.") # ignore some files for item in self.IGNORED_PATTERN: if item in blob_client.blob_name: logger.warning( "Ignoring file '%s' because it matches the ignored pattern '%s'", local_path.as_posix(), item ) return None logger.debug("Downloading blob '%s' to local path '%s'", blob_client.blob_name, local_path.resolve().as_posix()) local_path.parent.mkdir(parents=True, exist_ok=True) async with blob_client: with open(local_path, "wb") as f: stream = await blob_client.download_blob() async for chunk in stream.chunks(): f.write(chunk) return local_path async def _download_snapshot(self, httpx_client: httpx.AsyncClient, container_client, snapshot_id): """Download the flow snapshot.""" snapshot_urls = await self._get_flow_snapshot_urls(httpx_client, snapshot_id) logger.debug("Downloading all snapshot blobs from snapshot urls.") tasks = [] for url in snapshot_urls: blob_name = url.split(self.datastore.container_name)[-1].lstrip("/") blob_client = container_client.get_blob_client(blob_name) relative_path = url.split(self.run)[-1].lstrip("/") local_path = Path(self.output_folder / DownloadedRun.SNAPSHOT_FOLDER / relative_path) tasks.append(self._download_single_blob(blob_client, local_path)) await asyncio.gather(*tasks) async def _get_flow_snapshot_urls(self, httpx_client: httpx.AsyncClient, snapshot_id): logger.debug("Getting flow snapshot blob urls from snapshot id with calling to content service.") headers = self.run_ops._get_headers() endpoint = self.run_ops._run_history_endpoint_url.replace("/history/v1.0", "/content/v2.0") url = endpoint + "/snapshots/sas" payload = { "snapshotOrAssetId": snapshot_id, } error_msg_prefix = ( f"Failed to download flow snapshots with snapshot id {snapshot_id}, " f"because the client failed to retrieve data from content service" ) try: response = await httpx_client.post(url, headers=headers, json=payload) except Exception as e: raise 
DownloadInternalError(f"{error_msg_prefix}. Error: {e}") from e else: if response.status_code == 200: return self._parse_snapshot_response(response.json()) elif response.status_code == 404: raise DownloadInternalError(f"{error_msg_prefix}. Error: Snapshot id not found.") else: raise DownloadInternalError( f"{error_msg_prefix}. Code: {response.status_code}. Reason: {response.reason_phrase}" ) async def _get_asset_path(self, client: httpx.AsyncClient, asset_id): """Get the asset path from asset id.""" logger.debug("Getting asset path from asset id with calling to data service.") headers = self.run_ops._get_headers() endpoint = self.run_ops._run_history_endpoint_url.replace("/history", "/data") url = endpoint + "/dataversion/getByAssetId" payload = { "value": asset_id, } error_msg_prefix = "Failed to download flow artifacts due to failed to retrieve data from data service" try: response = await client.post(url, headers=headers, json=payload) except Exception as e: raise DownloadInternalError(f"{error_msg_prefix}. Error: {e}") from e if response.status_code != 200: raise DownloadInternalError( f"{error_msg_prefix}. Code: {response.status_code}. Reason: {response.reason_phrase}" ) response_data = response.json() data_path = response_data["dataVersion"]["dataUri"].split("/paths/")[-1] if self._use_flow_outputs: data_path = data_path.replace("flow_outputs", "flow_artifacts") return data_path def _parse_snapshot_response(self, response: dict): """Parse the snapshot response.""" urls = [] if response["absoluteUrl"]: urls.append(response["absoluteUrl"]) for value in response["children"].values(): urls += self._parse_snapshot_response(value) return urls def _download_run_logs(self): """Download the run logs.""" logger.debug("Downloading run logs.") logs = self.run_ops._get_log(self.run) with open(self.output_folder / DownloadedRun.LOGS_FILE_NAME, "w", encoding=DEFAULT_ENCODING) as f: f.write(logs) logger.debug("Downloaded run logs.") @classmethod def _from_run_operations(cls, run_ops: "RunOperations", run: str, output_folder: Union[str, Path]): """Create an instance from run operations.""" from azure.ai.ml.entities._datastore.azure_storage import AzureBlobDatastore datastore = run_ops._workspace_default_datastore if isinstance(datastore, AzureBlobDatastore): return cls(run=run, run_ops=run_ops, output_folder=output_folder) else: raise UserErrorException( f"Cannot download run {run!r} because the workspace default datastore is not supported. Supported ones " f"are ['AzureBlobDatastore'], got {type(datastore).__name__!r}." ) async def to_thread(func, /, *args, **kwargs): # this is copied from asyncio.to_thread() in Python 3.9 # as it is not available in Python 3.8, which is the minimum supported version of promptflow loop = asyncio.get_running_loop() ctx = contextvars.copy_context() func_call = functools.partial(ctx.run, func, *args, **kwargs) return await loop.run_in_executor(None, func_call)
0
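A standalone sketch of how the to_thread() backport defined at the end of the file above lets synchronous helpers be awaited alongside async work, which is how AsyncRunDownloader schedules its blocking metric and log downloads; blocking_io is a made-up stand-in for such a helper.

import asyncio
import time

from promptflow.azure.operations._async_run_downloader import to_thread


def blocking_io(tag: str) -> str:
    time.sleep(0.2)            # stands in for a synchronous REST or storage call
    return f"done: {tag}"


async def main():
    # both blocking calls run in the default thread pool and are awaited together
    results = await asyncio.gather(
        to_thread(blocking_io, "metrics"),
        to_thread(blocking_io, "logs"),
    )
    print(results)


if __name__ == "__main__":
    asyncio.run(main())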
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/_run_operations.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import asyncio import concurrent import copy import hashlib import json import os import shutil import sys import time from concurrent.futures import ThreadPoolExecutor from functools import cached_property from pathlib import Path from typing import Any, Dict, List, Optional, Union import requests from azure.ai.ml._artifacts._artifact_utilities import _upload_and_generate_remote_uri from azure.ai.ml._scope_dependent_operations import ( OperationConfig, OperationsContainer, OperationScope, _ScopeDependentOperations, ) from azure.ai.ml.constants._common import AssetTypes, AzureMLResourceType from azure.ai.ml.entities import Workspace from azure.ai.ml.operations import DataOperations from azure.ai.ml.operations._operation_orchestrator import OperationOrchestrator from promptflow._constants import LANGUAGE_KEY, FlowLanguage from promptflow._sdk._constants import ( LINE_NUMBER, MAX_RUN_LIST_RESULTS, MAX_SHOW_DETAILS_RESULTS, PROMPT_FLOW_DIR_NAME, PROMPT_FLOW_RUNS_DIR_NAME, REGISTRY_URI_PREFIX, VIS_PORTAL_URL_TMPL, AzureRunTypes, ListViewType, RunDataKeys, RunHistoryKeys, RunStatus, ) from promptflow._sdk._errors import InvalidRunStatusError, RunNotFoundError, RunOperationParameterError from promptflow._sdk._telemetry import ActivityType, WorkspaceTelemetryMixin, monitor_operation from promptflow._sdk._utils import in_jupyter_notebook, incremental_print, is_remote_uri, print_red_error from promptflow._sdk.entities import Run from promptflow._utils.async_utils import async_run_allowing_running_loop from promptflow._utils.flow_utils import get_flow_lineage_id from promptflow._utils.logger_utils import get_cli_sdk_logger from promptflow.azure._constants._flow import AUTOMATIC_RUNTIME, AUTOMATIC_RUNTIME_NAME, CLOUD_RUNS_PAGE_SIZE from promptflow.azure._load_functions import load_flow from promptflow.azure._restclient.flow_service_caller import FlowServiceCaller from promptflow.azure._utils.gerneral import get_authorization, get_user_alias_from_credential from promptflow.azure.operations._flow_operations import FlowOperations from promptflow.exceptions import UserErrorException RUNNING_STATUSES = RunStatus.get_running_statuses() logger = get_cli_sdk_logger() class RunRequestException(Exception): """RunRequestException.""" def __init__(self, message): super().__init__(message) class RunOperations(WorkspaceTelemetryMixin, _ScopeDependentOperations): """RunOperations that can manage runs. You should not instantiate this class directly. Instead, you should create an :class:`~promptflow.azure.PFClient` instance and this operation is available as the instance's attribute. 
""" def __init__( self, operation_scope: OperationScope, operation_config: OperationConfig, all_operations: OperationsContainer, flow_operations: FlowOperations, credential, service_caller: FlowServiceCaller, workspace: Workspace, **kwargs: Dict, ): super().__init__( operation_scope=operation_scope, operation_config=operation_config, workspace_name=operation_scope.workspace_name, subscription_id=operation_scope.subscription_id, resource_group_name=operation_scope.resource_group_name, ) self._operation_scope = operation_scope self._all_operations = all_operations self._service_caller = service_caller self._workspace = workspace self._credential = credential self._flow_operations = flow_operations self._orchestrators = OperationOrchestrator(self._all_operations, self._operation_scope, self._operation_config) self._workspace_default_datastore = self._datastore_operations.get_default() @property def _data_operations(self): return self._all_operations.get_operation(AzureMLResourceType.DATA, lambda x: isinstance(x, DataOperations)) @property def _datastore_operations(self) -> "DatastoreOperations": return self._all_operations.all_operations[AzureMLResourceType.DATASTORE] @cached_property def _run_history_endpoint_url(self): """Get the endpoint url for the workspace.""" endpoint = self._service_caller._service_endpoint return endpoint + "history/v1.0" + self._service_caller._common_azure_url_pattern def _get_run_portal_url(self, run_id: str): """Get the portal url for the run.""" portal_url, run_info = None, None try: run_info = self._get_run_from_pfs(run_id=run_id) except Exception as e: logger.warning(f"Failed to get run portal url from pfs for run {run_id!r}: {str(e)}") if run_info and hasattr(run_info, "studio_portal_endpoint"): portal_url = run_info.studio_portal_endpoint return portal_url def _get_headers(self): custom_header = { "Authorization": get_authorization(credential=self._credential), "Content-Type": "application/json", } return custom_header @monitor_operation(activity_name="pfazure.runs.create_or_update", activity_type=ActivityType.PUBLICAPI) def create_or_update(self, run: Run, **kwargs) -> Run: """Create or update a run. :param run: Run object to create or update. :type run: ~promptflow.entities.Run :return: Run object created or updated. :rtype: ~promptflow.entities.Run """ stream = kwargs.pop("stream", False) reset = kwargs.pop("reset_runtime", False) # validate the run object run._validate_for_run_create_operation() rest_obj = self._resolve_dependencies_in_parallel(run=run, runtime=kwargs.get("runtime"), reset=reset) self._service_caller.submit_bulk_run( subscription_id=self._operation_scope.subscription_id, resource_group_name=self._operation_scope.resource_group_name, workspace_name=self._operation_scope.workspace_name, body=rest_obj, ) if in_jupyter_notebook(): print(f"Portal url: {self._get_run_portal_url(run_id=run.name)}") if stream: self.stream(run=run.name) return self.get(run=run.name) @monitor_operation(activity_name="pfazure.runs.list", activity_type=ActivityType.PUBLICAPI) def list( self, max_results: int = MAX_RUN_LIST_RESULTS, list_view_type: ListViewType = ListViewType.ACTIVE_ONLY, **kwargs ) -> List[Run]: """List runs in the workspace. :param max_results: The max number of runs to return, defaults to 50, max is 100 :type max_results: int :param list_view_type: The list view type, defaults to ListViewType.ACTIVE_ONLY :type list_view_type: ListViewType :return: The list of runs. 
:rtype: List[~promptflow.entities.Run] """ if not isinstance(max_results, int) or max_results < 0: raise RunOperationParameterError(f"'max_results' must be a positive integer, got {max_results!r}") headers = self._get_headers() filter_archived = [] if list_view_type == ListViewType.ACTIVE_ONLY: filter_archived = ["false"] elif list_view_type == ListViewType.ARCHIVED_ONLY: filter_archived = ["true"] elif list_view_type == ListViewType.ALL: filter_archived = ["true", "false"] else: raise RunOperationParameterError( f"Invalid list view type: {list_view_type!r}, expecting one of ['ActiveOnly', 'ArchivedOnly', 'All']" ) pay_load = { "filters": [ {"field": "type", "operator": "eq", "values": ["runs"]}, {"field": "annotations/archived", "operator": "eq", "values": filter_archived}, { "field": "properties/runType", "operator": "contains", "values": [ AzureRunTypes.BATCH, AzureRunTypes.EVALUATION, AzureRunTypes.PAIRWISE_EVALUATE, ], }, ], "freeTextSearch": "", "order": [{"direction": "Desc", "field": "properties/creationContext/createdTime"}], # index service can return 100 results at most "pageSize": min(max_results, 100), "skip": 0, "includeTotalResultCount": True, "searchBuilder": "AppendPrefix", } endpoint = self._run_history_endpoint_url.replace("/history", "/index") url = endpoint + "/entities" response = requests.post(url, headers=headers, json=pay_load) if response.status_code == 200: entities = json.loads(response.text) runs = entities["value"] else: raise RunRequestException( f"Failed to get runs from service. Code: {response.status_code}, text: {response.text}" ) refined_runs = [] for run in runs: refined_runs.append(Run._from_index_service_entity(run)) return refined_runs @monitor_operation(activity_name="pfazure.runs.get_metrics", activity_type=ActivityType.PUBLICAPI) def get_metrics(self, run: Union[str, Run], **kwargs) -> dict: """Get the metrics from the run. :param run: The run or the run object :type run: Union[str, ~promptflow.entities.Run] :return: The metrics :rtype: dict """ run = Run._validate_and_return_run_name(run) self._check_cloud_run_completed(run_name=run) metrics = self._get_metrics_from_metric_service(run) return metrics @monitor_operation(activity_name="pfazure.runs.get_details", activity_type=ActivityType.PUBLICAPI) def get_details( self, run: Union[str, Run], max_results: int = MAX_SHOW_DETAILS_RESULTS, all_results: bool = False, **kwargs ) -> "DataFrame": """Get the details from the run. .. note:: If `all_results` is set to True, `max_results` will be overwritten to sys.maxsize. :param run: The run name or run object :type run: Union[str, ~promptflow.sdk.entities.Run] :param max_results: The max number of runs to return, defaults to 100 :type max_results: int :param all_results: Whether to return all results, defaults to False :type all_results: bool :raises RunOperationParameterError: If `max_results` is not a positive integer. :return: The details data frame. 
:rtype: pandas.DataFrame """ from pandas import DataFrame # if all_results is True, set max_results to sys.maxsize if all_results: max_results = sys.maxsize if not isinstance(max_results, int) or max_results < 1: raise RunOperationParameterError(f"'max_results' must be a positive integer, got {max_results!r}") run = Run._validate_and_return_run_name(run) self._check_cloud_run_completed(run_name=run) child_runs = self._get_flow_runs_pagination(run, max_results=max_results) inputs, outputs = self._get_inputs_outputs_from_child_runs(child_runs) # if there is any line run failed, the number of inputs and outputs will be different # this will result in pandas raising ValueError, so we need to handle mismatched case # if all line runs are failed, no need to fill the outputs if len(outputs) > 0: # get total number of line runs from inputs num_line_runs = len(list(inputs.values())[0]) num_outputs = len(list(outputs.values())[0]) if num_line_runs > num_outputs: # build full set with None as placeholder filled_outputs = {} output_keys = list(outputs.keys()) for k in output_keys: filled_outputs[k] = [None] * num_line_runs filled_outputs[LINE_NUMBER] = list(range(num_line_runs)) for i in range(num_outputs): line_number = outputs[LINE_NUMBER][i] for k in output_keys: filled_outputs[k][line_number] = outputs[k][i] # replace defective outputs with full set outputs = copy.deepcopy(filled_outputs) data = {} columns = [] for k in inputs: new_k = f"inputs.{k}" data[new_k] = copy.deepcopy(inputs[k]) columns.append(new_k) for k in outputs: new_k = f"outputs.{k}" data[new_k] = copy.deepcopy(outputs[k]) columns.append(new_k) df = DataFrame(data).reindex(columns=columns) if f"outputs.{LINE_NUMBER}" in columns: df = df.set_index(f"outputs.{LINE_NUMBER}") return df def _check_cloud_run_completed(self, run_name: str) -> bool: """Check if the cloud run is completed.""" run = self.get(run=run_name) run._check_run_status_is_completed() def _get_flow_runs_pagination(self, name: str, max_results: int) -> List[dict]: # call childRuns API with pagination to avoid PFS OOM # different from UX, run status should be completed here flow_runs = [] start_index, end_index = 0, CLOUD_RUNS_PAGE_SIZE - 1 while start_index < max_results: current_flow_runs = self._service_caller.get_child_runs( subscription_id=self._operation_scope.subscription_id, resource_group_name=self._operation_scope.resource_group_name, workspace_name=self._operation_scope.workspace_name, flow_run_id=name, start_index=start_index, end_index=end_index, ) # no data in current page if len(current_flow_runs) == 0: break start_index, end_index = start_index + CLOUD_RUNS_PAGE_SIZE, end_index + CLOUD_RUNS_PAGE_SIZE flow_runs += current_flow_runs return flow_runs[0:max_results] def _extract_metrics_from_metric_service_response(self, values) -> dict: """Get metrics from the metric service response.""" refined_metrics = {} metric_list = values.get("value", []) if not metric_list: return refined_metrics for metric in metric_list: metric_name = metric["name"] if self._is_system_metric(metric_name): continue refined_metrics[metric_name] = metric["value"][0]["data"][metric_name] return refined_metrics def _get_metrics_from_metric_service(self, run_id) -> dict: """Get the metrics from metric service.""" headers = self._get_headers() # refer to MetricController: https://msdata.visualstudio.com/Vienna/_git/vienna?path=/src/azureml-api/src/Metric/EntryPoints/Api/Controllers/MetricController.cs&version=GBmaster # noqa: E501 endpoint = 
self._run_history_endpoint_url.replace("/history/v1.0", "/metric/v2.0") url = endpoint + f"/runs/{run_id}/lastvalues" response = requests.post(url, headers=headers, json={}) if response.status_code == 200: values = response.json() return self._extract_metrics_from_metric_service_response(values) else: raise RunRequestException( f"Failed to get metrics from service. Code: {response.status_code}, text: {response.text}" ) @staticmethod def _is_system_metric(metric: str) -> bool: """Check if the metric is system metric. Current we have some system metrics like: __pf__.lines.completed, __pf__.lines.bypassed, __pf__.lines.failed, __pf__.nodes.xx.completed """ return ( metric.endswith(".completed") or metric.endswith(".bypassed") or metric.endswith(".failed") or metric.endswith(".is_completed") ) @monitor_operation(activity_name="pfazure.runs.get", activity_type=ActivityType.PUBLICAPI) def get(self, run: Union[str, Run], **kwargs) -> Run: """Get a run. :param run: The run name :type run: Union[str, ~promptflow.entities.Run] :return: The run object :rtype: ~promptflow.entities.Run """ run = Run._validate_and_return_run_name(run) return self._get_run_from_run_history(flow_run_id=run, **kwargs) def _get_run_from_run_history(self, flow_run_id, original_form=False, **kwargs): """Get run info from run history""" headers = self._get_headers() url = self._run_history_endpoint_url + "/rundata" payload = { "runId": flow_run_id, "selectRunMetadata": True, "selectRunDefinition": True, "selectJobSpecification": True, } response = requests.post(url, headers=headers, json=payload) if response.status_code == 200: run = response.json() # if original_form is True, return the original run data from run history, mainly for test use if original_form: return run run_data = self._refine_run_data_from_run_history(run) run = Run._from_run_history_entity(run_data) return run elif response.status_code == 404: raise RunNotFoundError(f"Run {flow_run_id!r} not found.") else: raise RunRequestException( f"Failed to get run from service. Code: {response.status_code}, text: {response.text}" ) def _refine_run_data_from_run_history(self, run_data: dict) -> dict: """Refine the run data from run history. Generate the portal url, input and output value from run history data. 
""" run_data = run_data[RunHistoryKeys.RunMetaData] # add cloud run url run_data[RunDataKeys.PORTAL_URL] = self._get_run_portal_url(run_id=run_data["runId"]) # get input and output value # TODO: Unify below values to the same pattern - azureml://xx properties = run_data["properties"] input_data = properties.pop("azureml.promptflow.input_data", None) input_run_id = properties.pop("azureml.promptflow.input_run_id", None) output_data = run_data["outputs"] if output_data: output_data = output_data.get("flow_outputs", {}).get("assetId", None) run_data[RunDataKeys.DATA] = input_data run_data[RunDataKeys.RUN] = input_run_id run_data[RunDataKeys.OUTPUT] = output_data return run_data def _get_run_from_index_service(self, flow_run_id, **kwargs): """Get run info from index service""" headers = self._get_headers() payload = { "filters": [ {"field": "type", "operator": "eq", "values": ["runs"]}, {"field": "annotations/archived", "operator": "eq", "values": ["false"]}, {"field": "properties/runId", "operator": "eq", "values": [flow_run_id]}, ], "order": [{"direction": "Desc", "field": "properties/startTime"}], "pageSize": 50, } endpoint = self._run_history_endpoint_url.replace("/history", "/index") url = endpoint + "/entities" response = requests.post(url, json=payload, headers=headers) if response.status_code == 200: runs = response.json().get("value", None) if not runs: raise RunRequestException( f"Could not found run with run id {flow_run_id!r}, please double check the run id and try again." ) run = runs[0] return Run._from_index_service_entity(run) else: raise RunRequestException( f"Failed to get run metrics from service. Code: {response.status_code}, text: {response.text}" ) def _get_run_from_pfs(self, run_id, **kwargs): """Get run info from pfs""" return self._service_caller.get_flow_run( subscription_id=self._operation_scope.subscription_id, resource_group_name=self._operation_scope.resource_group_name, workspace_name=self._operation_scope.workspace_name, flow_run_id=run_id, ) @monitor_operation(activity_name="pfazure.runs.archive", activity_type=ActivityType.PUBLICAPI) def archive(self, run: Union[str, Run]) -> Run: """Archive a run. :param run: The run name or run object :type run: Union[str, ~promptflow.entities.Run] :return: The run object :rtype: ~promptflow.entities.Run """ run = Run._validate_and_return_run_name(run) payload = { RunHistoryKeys.HIDDEN: True, } return self._modify_run_in_run_history(run_id=run, payload=payload) @monitor_operation(activity_name="pfazure.runs.restore", activity_type=ActivityType.PUBLICAPI) def restore(self, run: Union[str, Run]) -> Run: """Restore a run. 
:param run: The run name or run object :type run: Union[str, ~promptflow.entities.Run] :return: The run object :rtype: ~promptflow.entities.Run """ run = Run._validate_and_return_run_name(run) payload = { RunHistoryKeys.HIDDEN: False, } return self._modify_run_in_run_history(run_id=run, payload=payload) def _get_log(self, flow_run_id: str) -> str: return self._service_caller.caller.bulk_runs.get_flow_run_log_content( subscription_id=self._operation_scope.subscription_id, resource_group_name=self._operation_scope.resource_group_name, workspace_name=self._operation_scope.workspace_name, flow_run_id=flow_run_id, headers=self._get_headers(), ) @monitor_operation(activity_name="pfazure.runs.update", activity_type=ActivityType.PUBLICAPI) def update( self, run: Union[str, Run], display_name: Optional[str] = None, description: Optional[str] = None, tags: Optional[Dict[str, str]] = None, ) -> Optional[Run]: """Update a run. May update the display name, description or tags. .. note:: - Display name and description are strings, and tags is a dictionary of key-value pairs, both key and value are also strings. - Tags is a dictionary of key-value pairs. Updating tags will overwrite the existing key-value pair, but will not delete the existing key-value pairs. :param run: The run name or run object :type run: Union[str, ~promptflow.entities.Run] :param display_name: The display name :type display_name: Optional[str] :param description: The description :type description: Optional[str] :param tags: The tags :type tags: Optional[Dict[str, str]] :raises UpdateRunError: If nothing or wrong type values provided to update the run. :return: The run object :rtype: Optional[~promptflow.entities.Run] """ run = Run._validate_and_return_run_name(run) if display_name is None and description is None and tags is None: logger.warning("Nothing provided to update the run.") return None payload = {} if isinstance(display_name, str): payload["displayName"] = display_name elif display_name is not None: logger.warning(f"Display name must be a string, got {type(display_name)!r}: {display_name!r}.") if isinstance(description, str): payload["description"] = description elif description is not None: logger.warning(f"Description must be a string, got {type(description)!r}: {description!r}.") # check if the tags type is Dict[str, str] if isinstance(tags, dict) and all( isinstance(key, str) and isinstance(value, str) for key, value in tags.items() ): payload["tags"] = tags elif tags is not None: logger.warning(f"Tags type must be 'Dict[str, str]', got non-dict or non-string key/value in tags: {tags}.") return self._modify_run_in_run_history(run_id=run, payload=payload) @monitor_operation(activity_name="pfazure.runs.stream", activity_type=ActivityType.PUBLICAPI) def stream(self, run: Union[str, Run], raise_on_error: bool = True) -> Run: """Stream the logs of a run. :param run: The run name or run object :type run: Union[str, ~promptflow.entities.Run] :param raise_on_error: Raises an exception if a run fails or canceled. :type raise_on_error: bool :return: The run object :rtype: ~promptflow.entities.Run """ run = self.get(run=run) # TODO: maybe we need to make this configurable file_handler = sys.stdout # different from Azure ML job, flow job can run very fast, so it might not print anything; # use below variable to track this behavior, and at least print something to the user. 
try: printed = 0 stream_count = 0 start = time.time() while run.status in RUNNING_STATUSES or run.status == RunStatus.FINALIZING: file_handler.flush() stream_count += 1 # print prompt every 3 times, in case there is no log printed if stream_count % 3 == 0: # print prompt every 3 times file_handler.write(f"(Run status is {run.status!r}, continue streaming...)\n") # if the run is not started for 5 minutes, print an error message and break the loop if run.status == RunStatus.NOT_STARTED: current = time.time() if current - start > 300: file_handler.write( f"The run {run.name!r} is in status 'NotStarted' for 5 minutes, streaming is stopped." "Please make sure you are using the latest runtime.\n" ) break available_logs = self._get_log(flow_run_id=run.name) printed = incremental_print(available_logs, printed, file_handler) time.sleep(10) run = self.get(run=run.name) # ensure all logs are printed file_handler.flush() available_logs = self._get_log(flow_run_id=run.name) incremental_print(available_logs, printed, file_handler) file_handler.write("======= Run Summary =======\n") duration = None if run._start_time and run._end_time: duration = str(run._end_time - run._start_time) file_handler.write( f'Run name: "{run.name}"\n' f'Run status: "{run.status}"\n' f'Start time: "{run._start_time}"\n' f'Duration: "{duration}"\n' f'Run url: "{self._get_run_portal_url(run_id=run.name)}"' ) except KeyboardInterrupt: error_message = ( "The output streaming for the flow run was interrupted.\n" "But the run is still executing on the cloud.\n" ) print(error_message) if run.status == RunStatus.FAILED or run.status == RunStatus.CANCELED: if run.status == RunStatus.FAILED: try: error_message = run._error["error"]["message"] except Exception: # pylint: disable=broad-except error_message = "Run fails with unknown error." else: error_message = "Run is canceled." if raise_on_error: raise InvalidRunStatusError(error_message) else: print_red_error(error_message) return run def _resolve_data_to_asset_id(self, run: Run): # Skip if no data provided if run.data is None: return test_data = run.data def _get_data_type(_data): if os.path.isdir(_data): return AssetTypes.URI_FOLDER else: return AssetTypes.URI_FILE if is_remote_uri(test_data): # Pass through ARM id or remote url return test_data if os.path.exists(test_data): # absolute local path, upload, transform to remote url data_type = _get_data_type(test_data) test_data = _upload_and_generate_remote_uri( self._operation_scope, self._datastore_operations, test_data, datastore_name=self._workspace_default_datastore.name, show_progress=self._show_progress, ) if data_type == AssetTypes.URI_FOLDER and test_data and not test_data.endswith("/"): test_data = test_data + "/" else: raise ValueError( f"Local path {test_data!r} not exist. " "If it's remote data, only data with azureml prefix or remote url is supported." ) return test_data def _resolve_flow(self, run: Run): if run._use_remote_flow: return self._resolve_flow_definition_resource_id(run=run) flow = load_flow(run.flow) self._flow_operations._resolve_arm_id_or_upload_dependencies( flow=flow, # ignore .promptflow/dag.tools.json only for run submission scenario in python ignore_tools_json=flow._flow_dict.get(LANGUAGE_KEY, None) != FlowLanguage.CSharp, ) return flow.path def _get_session_id(self, flow): try: user_alias = get_user_alias_from_credential(self._credential) except Exception: # fall back to unknown user when failed to get credential. 
user_alias = "unknown_user" flow_id = get_flow_lineage_id(flow_dir=flow) session_id = f"{user_alias}_{flow_id}" # hash and truncate to avoid the session id getting too long # backend has a 64 bit limit for session id. # use hexdigest to avoid non-ascii characters in session id session_id = str(hashlib.sha256(session_id.encode()).hexdigest())[:48] return session_id def _get_inputs_outputs_from_child_runs(self, runs: List[Dict[str, Any]]): """Get the inputs and outputs from the child runs.""" inputs = {} outputs = {} outputs[LINE_NUMBER] = [] runs.sort(key=lambda x: x["index"]) # 1st loop, until have all outputs keys outputs_keys = [] for run in runs: run_outputs = run["output"] if isinstance(run_outputs, dict): for k in run_outputs: outputs_keys.append(k) break # 2nd complete loop, get values for run in runs: index, run_inputs, run_outputs = run["index"], run["inputs"], run["output"] # input should always available as a dict for k, v in run_inputs.items(): if k not in inputs: inputs[k] = [] inputs[k].append(v) # output outputs[LINE_NUMBER].append(index) # for failed line run, output is None, instead of a dict # in this case, we append an empty line if not isinstance(run_outputs, dict): for k in outputs_keys: if k == LINE_NUMBER: continue if k not in outputs: outputs[k] = [] outputs[k].append(None) else: for k, v in run_outputs.items(): if k not in outputs: outputs[k] = [] outputs[k].append(v) return inputs, outputs @monitor_operation(activity_name="pfazure.runs.visualize", activity_type=ActivityType.PUBLICAPI) def visualize(self, runs: Union[str, Run, List[str], List[Run]], **kwargs) -> None: """Visualize run(s) using Azure AI portal. :param runs: Names of the runs, or list of run objects. :type runs: Union[str, ~promptflow.sdk.entities.Run, List[str], List[~promptflow.sdk.entities.Run]] """ if not isinstance(runs, list): runs = [runs] validated_runs = [] for run in runs: run_name = Run._validate_and_return_run_name(run) validated_runs.append(run_name) subscription_id = self._operation_scope.subscription_id resource_group_name = self._operation_scope.resource_group_name workspace_name = self._operation_scope.workspace_name names = ",".join(validated_runs) portal_url = VIS_PORTAL_URL_TMPL.format( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, names=names, ) print(f"Web View: {portal_url}") def _resolve_automatic_runtime(self): logger.warning( f"You're using {AUTOMATIC_RUNTIME}, if it's first time you're using it, " "it may take a while to build runtime and you may see 'NotStarted' status for a while. 
" ) runtime_name = AUTOMATIC_RUNTIME_NAME return runtime_name def _resolve_runtime(self, run, flow_path, runtime): runtime = run._runtime or runtime # for remote flow case, use flow name as session id # for local flow case, use flow path to calculate session id session_id = run._flow_name if run._use_remote_flow else self._get_session_id(flow=flow_path) if runtime is None or runtime == AUTOMATIC_RUNTIME_NAME: runtime = self._resolve_automatic_runtime() elif not isinstance(runtime, str): raise TypeError(f"runtime should be a string, got {type(runtime)} for {runtime}") return runtime, session_id def _resolve_dependencies_in_parallel(self, run, runtime, reset=None): flow_path = run.flow with ThreadPoolExecutor() as pool: tasks = [ pool.submit(self._resolve_data_to_asset_id, run=run), pool.submit(self._resolve_flow, run=run), ] concurrent.futures.wait(tasks, return_when=concurrent.futures.ALL_COMPLETED) task_results = [task.result() for task in tasks] run.data = task_results[0] run.flow = task_results[1] runtime, session_id = self._resolve_runtime(run=run, flow_path=flow_path, runtime=runtime) rest_obj = run._to_rest_object() rest_obj.runtime_name = runtime rest_obj.session_id = session_id # TODO(2884482): support force reset & force install if runtime == "None": # HARD CODE for office scenario, use workspace default runtime when specified None rest_obj.runtime_name = None return rest_obj def _refine_payload_for_run_update(self, payload: dict, key: str, value, expected_type: type) -> dict: """Refine the payload for run update.""" if value is not None: payload[key] = value return payload def _modify_run_in_run_history(self, run_id: str, payload: dict) -> Run: """Modify run info in run history.""" headers = self._get_headers() url = self._run_history_endpoint_url + f"/runs/{run_id}/modify" response = requests.patch(url, headers=headers, json=payload) if response.status_code == 200: # the modify api returns different data format compared with get api, so we use get api here to # return standard Run object return self.get(run=run_id) else: raise RunRequestException( f"Failed to modify run in run history. Code: {response.status_code}, text: {response.text}" ) def _resolve_flow_definition_resource_id(self, run: Run): """Resolve the flow definition resource id.""" # for registry flow pattern, the flow uri can be passed as flow definition resource id directly if run.flow.startswith(REGISTRY_URI_PREFIX): return run.flow # for workspace flow pattern, generate the flow definition resource id workspace_id = self._workspace._workspace_id location = self._workspace.location return f"azureml://locations/{location}/workspaces/{workspace_id}/flows/{run._flow_name}" @monitor_operation(activity_name="pfazure.runs.download", activity_type=ActivityType.PUBLICAPI) def download( self, run: Union[str, Run], output: Optional[Union[str, Path]] = None, overwrite: Optional[bool] = False ) -> str: """Download the data of a run, including input, output, snapshot and other run information. .. note:: After the download is finished, you can use ``pf run create --source <run-info-local-folder>`` to register this run as a local run record, then you can use commands like ``pf run show/visualize`` to inspect the run just like a run that was created from local flow. :param run: The run name or run object :type run: Union[str, ~promptflow.entities.Run] :param output: The output directory. Default to be default to be "~/.promptflow/.runs" folder. 
:type output: Optional[str] :param overwrite: Whether to overwrite the existing run folder. Default to be False. :type overwrite: Optional[bool] :return: The run directory path :rtype: str """ import platform from promptflow.azure.operations._async_run_downloader import AsyncRunDownloader run = Run._validate_and_return_run_name(run) run_folder = self._validate_for_run_download(run=run, output=output, overwrite=overwrite) run_downloader = AsyncRunDownloader._from_run_operations(run_ops=self, run=run, output_folder=run_folder) if platform.system().lower() == "windows": # Reference: https://stackoverflow.com/questions/45600579/asyncio-event-loop-is-closed-when-getting-loop # On Windows seems to be a problem with EventLoopPolicy, use this snippet to work around it asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) async_run_allowing_running_loop(run_downloader.download) result_path = run_folder.resolve().as_posix() logger.info(f"Successfully downloaded run {run!r} to {result_path!r}.") return result_path def _validate_for_run_download(self, run: Union[str, Run], output: Optional[Union[str, Path]], overwrite): """Validate the run download parameters.""" run = Run._validate_and_return_run_name(run) # process the output path if output is None: # default to be "~/.promptflow/.runs" folder output_directory = Path.home() / PROMPT_FLOW_DIR_NAME / PROMPT_FLOW_RUNS_DIR_NAME else: output_directory = Path(output) # validate the run folder run_folder = output_directory / run if run_folder.exists(): if overwrite is True: logger.warning("Removing existing run folder %r.", run_folder.resolve().as_posix()) shutil.rmtree(run_folder) else: raise UserErrorException( f"Run folder {run_folder.resolve().as_posix()!r} already exists, please specify a new output path " f"or set the overwrite flag to be true." ) # check the run status, only download the completed run run = self.get(run=run) if run.status != RunStatus.COMPLETED: raise UserErrorException( f"Can only download the run with status {RunStatus.COMPLETED!r} " f"while {run.name!r}'s status is {run.status!r}." ) run_folder.mkdir(parents=True) return run_folder @monitor_operation(activity_name="pfazure.runs.cancel", activity_type=ActivityType.PUBLICAPI) def cancel(self, run: Union[str, Run], **kwargs) -> None: """Cancel a run. :param run: The run name or run object :type run: Union[str, ~promptflow.entities.Run] """ run = Run._validate_and_return_run_name(run) self._service_caller.cancel_flow_run( subscription_id=self._operation_scope.subscription_id, resource_group_name=self._operation_scope.resource_group_name, workspace_name=self._operation_scope.workspace_name, flow_run_id=run, )
0
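A minimal usage sketch (not part of the repo) for the run download/cancel operations above, driven through the azure `PFClient`; the workspace values and run name are placeholders.

```python
# Hedged sketch: subscription/resource-group/workspace/run values are placeholders.
from azure.identity import DefaultAzureCredential

from promptflow.azure import PFClient

pf = PFClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
    resource_group_name="<resource-group>",
    workspace_name="<workspace-name>",
)

# Download the run's inputs, outputs, snapshot and metadata to the default
# "~/.promptflow/.runs" folder; the local run directory path is returned.
local_run_dir = pf.runs.download(run="<run-name>", overwrite=True)
print(local_run_dir)

# A running run can also be cancelled by name:
# pf.runs.cancel(run="<run-name>")
```

After the download, the folder can be registered as a local run record with `pf run create --source <run-info-local-folder>`, as the docstring above notes.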
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/_fileshare_storeage_helper.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import os from collections import defaultdict from functools import cached_property from multiprocessing import Lock from pathlib import Path from typing import Any, Dict, Optional from azure.ai.ml._artifacts._fileshare_storage_helper import FileStorageClient from azure.ai.ml._utils._asset_utils import ( DirectoryUploadProgressBar, FileUploadProgressBar, IgnoreFile, get_directory_size, ) from azure.core.exceptions import ResourceExistsError from azure.storage.fileshare import DirectoryProperties, ShareDirectoryClient from promptflow._sdk._vendor import get_upload_files_from_folder from promptflow.azure._constants._flow import PROMPTFLOW_FILE_SHARE_DIR from promptflow.azure._utils.gerneral import get_user_alias_from_credential uploading_lock = defaultdict(Lock) class FlowFileStorageClient(FileStorageClient): def __init__(self, credential: str, file_share_name: str, account_url: str, azure_cred): super().__init__(credential=credential, file_share_name=file_share_name, account_url=account_url) try: user_alias = get_user_alias_from_credential(azure_cred) except Exception: # fall back to unknown user when failed to get credential. user_alias = "unknown_user" self._user_alias = user_alias # TODO: update this after we finalize the design for flow file storage client # create user folder if not exist for directory_path in ["Users", f"Users/{user_alias}", f"Users/{user_alias}/{PROMPTFLOW_FILE_SHARE_DIR}"]: self.directory_client = ShareDirectoryClient( account_url=account_url, credential=credential, share_name=file_share_name, directory_path=directory_path, ) # try to create user folder if not exist try: self.directory_client.create_directory() except ResourceExistsError: pass @cached_property def file_share_prefix(self) -> str: return f"Users/{self._user_alias}/{PROMPTFLOW_FILE_SHARE_DIR}" def upload( self, source: str, name: str, version: str, ignore_file: IgnoreFile = IgnoreFile(None), asset_hash: Optional[str] = None, show_progress: bool = True, ) -> Dict[str, str]: """Upload a file or directory to a path inside the file system.""" source_name = Path(source).name dest = asset_hash # truncate path longer than 50 chars for terminal display if show_progress and len(source_name) >= 50: formatted_path = "{:.47}".format(source_name) + "..." 
else: formatted_path = source_name msg = f"Uploading {formatted_path}" # lock to prevent concurrent uploading of the same file or directory with uploading_lock[self.directory_client.directory_path + "/" + dest]: # start upload if os.path.isdir(source): subdir = self.directory_client.get_subdirectory_client(dest) if not subdir.exists(): # directory is uploaded based on asset hash for now, so skip uploading if subdir exists self.upload_dir( source, dest, msg=msg, show_progress=show_progress, ignore_file=ignore_file, ) else: self.upload_file(source, dest=dest, msg=msg, show_progress=show_progress) artifact_info = {"remote path": dest, "name": name, "version": version} return artifact_info def upload_file( self, source: str, dest: str, show_progress: Optional[bool] = None, msg: Optional[str] = None, in_directory: bool = False, subdirectory_client: Optional[ShareDirectoryClient] = None, callback: Optional[Any] = None, ) -> None: """ " Upload a single file to a path inside the file system directory.""" validate_content = os.stat(source).st_size > 0 # don't do checksum for empty files # relative path from root relative_path = Path(subdirectory_client.directory_path).relative_to(self.directory_client.directory_path) dest = Path(dest).relative_to(relative_path).as_posix() if "/" in dest: # dest is a folder, need to switch subdirectory client dest_dir, dest = dest.rsplit("/", 1) subdirectory_client = subdirectory_client.get_subdirectory_client(dest_dir) with open(source, "rb") as data: if in_directory: file_name = dest.rsplit("/")[-1] if show_progress: subdirectory_client.upload_file( file_name=file_name, data=data, validate_content=validate_content, raw_response_hook=callback, ) else: subdirectory_client.upload_file( file_name=file_name, data=data, validate_content=validate_content, ) else: if show_progress: with FileUploadProgressBar(msg=msg) as progress_bar: self.directory_client.upload_file( file_name=dest, data=data, validate_content=validate_content, raw_response_hook=progress_bar.update_to, ) else: self.directory_client.upload_file(file_name=dest, data=data, validate_content=validate_content) self.uploaded_file_count = self.uploaded_file_count + 1 def upload_dir( self, source: str, dest: str, msg: str, show_progress: bool, ignore_file: IgnoreFile, ) -> None: """Upload a directory to a path inside the fileshare directory.""" subdir = self.directory_client.create_subdirectory(dest) source_path = Path(source).resolve() prefix = dest + "/" upload_paths = get_upload_files_from_folder( path=source_path, prefix=prefix, ignore_file=ignore_file, ) upload_paths = sorted(upload_paths) self.total_file_count = len(upload_paths) # travers all directories recursively and create them in the fileshare def travers_recursively(child_dir, source_dir): for item in os.listdir(source_dir): item_path = os.path.join(source_dir, item) if os.path.isdir(item_path): new_dir = child_dir.create_subdirectory(item) travers_recursively(new_dir, item_path) travers_recursively(child_dir=subdir, source_dir=source) if show_progress: with DirectoryUploadProgressBar(dir_size=get_directory_size(source_path), msg=msg) as progress_bar: for src, destination in upload_paths: self.upload_file( src, destination, in_directory=True, subdirectory_client=subdir, show_progress=show_progress, callback=progress_bar.update_to, ) else: for src, destination in upload_paths: self.upload_file( src, destination, in_directory=True, subdirectory_client=subdir, show_progress=show_progress, ) def _check_file_share_directory_exist(self, dest) -> bool: 
"""Check if the file share directory exists.""" return self.directory_client.get_subdirectory_client(dest).exists() def _check_file_share_file_exist(self, dest) -> bool: """Check if the file share directory exists.""" if dest.startswith(self.file_share_prefix): dest = dest.replace(f"{self.file_share_prefix}/", "") file_client = self.directory_client.get_file_client(dest) try: file_client.get_file_properties() except Exception: return False return True def _delete_file_share_directory(self, dir_client) -> None: """Recursively delete a directory with content in the file share.""" for item in dir_client.list_directories_and_files(): if isinstance(item, DirectoryProperties): self._delete_file_share_directory(dir_client.get_subdirectory_client(item.name)) else: dir_client.delete_file(item.name) dir_client.delete_directory()
0
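A hedged sketch of how `FlowFileStorageClient` above is typically driven: construct it from datastore info, check whether the target folder already exists, then upload a local flow folder. All account/share values are placeholders, and passing `azure_cred=None` simply triggers the fallback to the "unknown_user" alias.

```python
# Hedged sketch with hypothetical values; mirrors the check-then-upload flow used by FlowOperations.
from azure.ai.ml._utils._asset_utils import IgnoreFile

from promptflow.azure.operations._fileshare_storeage_helper import FlowFileStorageClient

storage_client = FlowFileStorageClient(
    credential="<file-share-sas-or-key>",  # datastore_info["credential"]
    file_share_name="<file-share-name>",  # datastore_info["container_name"]
    account_url="https://<account>.file.core.windows.net",  # datastore_info["account_url"]
    azure_cred=None,  # an azure.identity credential in real use; None falls back to "unknown_user"
)

dest = "my-flow-01-01-2024-00-00-00"  # hypothetical remote folder name
if not storage_client._check_file_share_directory_exist(dest):
    storage_client.upload_dir(
        source="./my-flow",  # local flow folder
        dest=dest,  # folder created under Users/<alias>/promptflow on the share
        msg="uploading flow",
        show_progress=False,
        ignore_file=IgnoreFile(None),
    )

print(f"{storage_client.file_share_prefix}/{dest}")
```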
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/_flow_operations.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- # pylint: disable=protected-access import copy import json import os import re from datetime import datetime from functools import cached_property from pathlib import Path from typing import Dict, List, Optional, Union import requests from azure.ai.ml._artifacts._artifact_utilities import _check_and_upload_path from azure.ai.ml._scope_dependent_operations import ( OperationConfig, OperationsContainer, OperationScope, _ScopeDependentOperations, ) from azure.ai.ml.constants._common import SHORT_URI_FORMAT from azure.ai.ml.entities import Workspace from azure.ai.ml.operations._operation_orchestrator import OperationOrchestrator from azure.core.exceptions import HttpResponseError from promptflow._sdk._constants import ( CLIENT_FLOW_TYPE_2_SERVICE_FLOW_TYPE, DAG_FILE_NAME, FLOW_TOOLS_JSON, MAX_LIST_CLI_RESULTS, PROMPT_FLOW_DIR_NAME, WORKSPACE_LINKED_DATASTORE_NAME, FlowType, ListViewType, ) from promptflow._sdk._errors import FlowOperationError from promptflow._sdk._telemetry import ActivityType, WorkspaceTelemetryMixin, monitor_operation from promptflow._sdk._utils import PromptflowIgnoreFile, generate_flow_tools_json from promptflow._sdk._vendor._asset_utils import traverse_directory from promptflow._utils.logger_utils import get_cli_sdk_logger from promptflow.azure._constants._flow import DEFAULT_STORAGE from promptflow.azure._entities._flow import Flow from promptflow.azure._load_functions import load_flow from promptflow.azure._restclient.flow_service_caller import FlowServiceCaller from promptflow.azure.operations._artifact_utilities import _get_datastore_name, get_datastore_info from promptflow.azure.operations._fileshare_storeage_helper import FlowFileStorageClient from promptflow.exceptions import SystemErrorException, UserErrorException logger = get_cli_sdk_logger() class FlowOperations(WorkspaceTelemetryMixin, _ScopeDependentOperations): """FlowOperations that can manage flows. You should not instantiate this class directly. Instead, you should create a :class:`~promptflow.azure.PFClient` instance and this operation is available as the instance's attribute. 
""" _FLOW_RESOURCE_PATTERN = re.compile(r"azureml:.*?/workspaces/(?P<experiment_id>.*?)/flows/(?P<flow_id>.*?)$") def __init__( self, operation_scope: OperationScope, operation_config: OperationConfig, all_operations: OperationsContainer, credential, service_caller: FlowServiceCaller, workspace: Workspace, **kwargs: Dict, ): super().__init__( operation_scope=operation_scope, operation_config=operation_config, workspace_name=operation_scope.workspace_name, subscription_id=operation_scope.subscription_id, resource_group_name=operation_scope.resource_group_name, ) self._all_operations = all_operations self._service_caller = service_caller self._credential = credential self._workspace = workspace @cached_property def _workspace_id(self): return self._workspace._workspace_id @cached_property def _index_service_endpoint_url(self): """Get the endpoint url for the workspace.""" endpoint = self._service_caller._service_endpoint return endpoint + "index/v1.0" + self._service_caller._common_azure_url_pattern @monitor_operation(activity_name="pfazure.flows.create_or_update", activity_type=ActivityType.PUBLICAPI) def create_or_update(self, flow: Union[str, Path], display_name=None, type=None, **kwargs) -> Flow: """Create a flow to remote from local source, or update the metadata of an existing flow. .. note:: Functionality of updating flow metadata is yet to be supported. :param flow: The source of the flow to create. :type flow: Union[str, Path] :param display_name: The display name of the flow to create. Default to be flow folder name + timestamp if not specified. e.g. "web-classification-10-27-2023-14-19-10" :type display_name: str :param type: The type of the flow to create. One of ["standard", evaluation", "chat"]. Default to be "standard" if not specified. :type type: str :param description: The description of the flow to create. Default to be the description in flow yaml file. :type description: str :param tags: The tags of the flow to create. Default to be the tags in flow yaml file. :type tags: Dict[str, str] """ # validate the parameters azure_flow, flow_display_name, flow_type, kwargs = FlowOperations._validate_flow_creation_parameters( flow, display_name, type, **kwargs ) # upload to file share file_share_flow_path = self._resolve_flow_code_and_upload_to_file_share(flow=azure_flow) if not file_share_flow_path: raise FlowOperationError(f"File share path should not be empty, got {file_share_flow_path!r}.") # create flow to remote flow_definition_file_path = f"{file_share_flow_path}/{DAG_FILE_NAME}" rest_flow = self._create_remote_flow_via_file_share_path( flow_display_name=flow_display_name, flow_type=flow_type, flow_definition_file_path=flow_definition_file_path, **kwargs, ) result_flow = Flow._from_pf_service(rest_flow) flow_dict = result_flow._to_dict() print(f"Flow created successfully:\n{json.dumps(flow_dict, indent=4)}") return result_flow @staticmethod def _validate_flow_creation_parameters(source, flow_display_name=None, flow_type=None, **kwargs): """Validate the parameters for flow creation operation.""" # validate the source folder logger.info("Validating flow source.") if not Path(source, DAG_FILE_NAME).exists(): raise UserErrorException( f"Flow source must be a directory with flow definition yaml '{DAG_FILE_NAME}'. " f"Got {Path(source).resolve().as_posix()!r}." 
) # validate flow source with flow schema logger.info("Validating flow schema.") flow_dict = FlowOperations._validate_flow_schema(source, flow_display_name, flow_type, **kwargs) logger.info("Validating flow creation parameters.") flow = load_flow(source) # if no flow name specified, use "flow name + timestamp" flow_display_name = flow_dict.get("display_name", None) if not flow_display_name: flow_display_name = f"{Path(source).name}-{datetime.now().strftime('%m-%d-%Y-%H-%M-%S')}" # if no flow type specified, use default flow type "standard" flow_type = flow_dict.get("type", None) if not flow_type: flow_type = FlowType.STANDARD # update description and tags to be the final value description = flow_dict.get("description", None) if isinstance(description, str): kwargs["description"] = description tags = flow_dict.get("tags", None) if tags: kwargs["tags"] = tags return flow, flow_display_name, flow_type, kwargs @staticmethod def _validate_flow_schema(source, display_name=None, type=None, **kwargs): """Validate the flow schema.""" from promptflow._sdk.entities._flow import ProtectedFlow params_override = copy.deepcopy(kwargs) if display_name is not None: params_override["display_name"] = display_name if type is not None: params_override["type"] = type flow_entity = ProtectedFlow.load(source=source, params_override=params_override) flow_entity._validate(raise_error=True) # raise error if validation failed flow_dict = flow_entity._dump_for_validation() return flow_dict def _resolve_flow_code_and_upload_to_file_share(self, flow: Flow, ignore_tools_json=False) -> str: remote_file_share_folder_name = f"{Path(flow.code).name}-{datetime.now().strftime('%m-%d-%Y-%H-%M-%S')}" ops = OperationOrchestrator(self._all_operations, self._operation_scope, self._operation_config) file_share_flow_path = "" logger.info("Building flow code.") with flow._build_code() as code: if code is None: raise FlowOperationError("Failed to build flow code.") # ignore flow.tools.json if needed (e.g. for flow run scenario) if ignore_tools_json: ignore_file = code._ignore_file if isinstance(ignore_file, PromptflowIgnoreFile): ignore_file._ignore_tools_json = ignore_tools_json else: raise FlowOperationError( message=f"Flow code should have PromptflowIgnoreFile, got {type(ignore_file)}" ) code.datastore = DEFAULT_STORAGE datastore_name = _get_datastore_name(datastore_name=DEFAULT_STORAGE) datastore_operation = ops._code_assets._datastore_operation datastore_info = get_datastore_info(datastore_operation, datastore_name) logger.debug("Creating storage client for uploading flow to file share.") storage_client = FlowFileStorageClient( credential=datastore_info["credential"], file_share_name=datastore_info["container_name"], account_url=datastore_info["account_url"], azure_cred=datastore_operation._credential, ) # set storage client to flow operation, can be used in test case self._storage_client = storage_client # check if the file share directory exists logger.debug("Checking if the file share directory exists.") if storage_client._check_file_share_directory_exist(remote_file_share_folder_name): raise FlowOperationError( f"Remote flow folder {remote_file_share_folder_name!r} already exists under " f"'{storage_client.file_share_prefix}'. Please change the flow folder name and try again." 
) try: logger.info("Uploading flow directory to file share.") storage_client.upload_dir( source=code.path, dest=remote_file_share_folder_name, msg="test", ignore_file=code._ignore_file, show_progress=False, ) except Exception as e: raise FlowOperationError(f"Failed to upload flow to file share due to: {str(e)}.") from e file_share_flow_path = f"{storage_client.file_share_prefix}/{remote_file_share_folder_name}" logger.info(f"Successfully uploaded flow to file share path {file_share_flow_path!r}.") return file_share_flow_path def _create_remote_flow_via_file_share_path( self, flow_display_name, flow_type, flow_definition_file_path, **kwargs ): """Create a flow to remote from file share path.""" service_flow_type = CLIENT_FLOW_TYPE_2_SERVICE_FLOW_TYPE[flow_type] description = kwargs.get("description", None) tags = kwargs.get("tags", None) body = { "flow_name": flow_display_name, "flow_definition_file_path": flow_definition_file_path, "flow_type": service_flow_type, "description": description, "tags": tags, } rest_flow_result = self._service_caller.create_flow( subscription_id=self._operation_scope.subscription_id, resource_group_name=self._operation_scope.resource_group_name, workspace_name=self._operation_scope.workspace_name, body=body, ) return rest_flow_result def get(self, name: str) -> Flow: """Get a flow from azure. :param name: The name of the flow to get. :type name: str :return: The flow. :rtype: ~promptflow.azure.entities.Flow """ try: rest_flow = self._service_caller.get_flow( subscription_id=self._operation_scope.subscription_id, resource_group_name=self._operation_scope.resource_group_name, workspace_name=self._operation_scope.workspace_name, flow_id=name, experiment_id=self._workspace_id, # for flow operations, current experiment id is workspace id ) except HttpResponseError as e: if e.status_code == 404: raise FlowOperationError(f"Flow {name!r} not found.") from e else: raise FlowOperationError(f"Failed to get flow {name!r} due to: {str(e)}.") from e flow = Flow._from_pf_service(rest_flow) return flow @monitor_operation(activity_name="pfazure.flows.list", activity_type=ActivityType.PUBLICAPI) def list( self, max_results: int = MAX_LIST_CLI_RESULTS, flow_type: Optional[FlowType] = None, list_view_type: ListViewType = ListViewType.ACTIVE_ONLY, include_others: bool = False, **kwargs, ) -> List[Flow]: """List flows from azure. :param max_results: The max number of runs to return, defaults to 50, max is 100 :type max_results: int :param flow_type: The flow type, defaults to None, which means all flow types. Other supported flow types are ["standard", "evaluation", "chat"]. :type flow_type: Optional[FlowType] :param list_view_type: The list view type, defaults to ListViewType.ACTIVE_ONLY :type list_view_type: ListViewType :param include_others: Whether to list flows owned by other users in the remote workspace, defaults to False :type include_others: bool :return: The list of flows. 
:rtype: List[~promptflow.azure.entities.Flow] """ if not isinstance(max_results, int) or max_results < 1: raise FlowOperationError(f"'max_results' must be a positive integer, got {max_results!r}") normalized_flow_type = str(flow_type).lower() if flow_type is not None and normalized_flow_type not in FlowType.get_all_values(): raise FlowOperationError(f"'flow_type' must be one of {FlowType.get_all_values()}, got {flow_type!r}.") headers = self._service_caller._get_headers() if list_view_type == ListViewType.ACTIVE_ONLY: filter_archived = ["false"] elif list_view_type == ListViewType.ARCHIVED_ONLY: filter_archived = ["true"] elif list_view_type == ListViewType.ALL: filter_archived = ["true", "false"] else: raise FlowOperationError( f"Invalid list view type: {list_view_type!r}, expecting one of ['ActiveOnly', 'ArchivedOnly', 'All']" ) user_object_id, user_tenant_id = self._service_caller._get_user_identity_info() payload = { "filters": [ {"field": "type", "operator": "eq", "values": ["flows"]}, {"field": "annotations/isArchived", "operator": "eq", "values": filter_archived}, { "field": "properties/creationContext/createdBy/userTenantId", "operator": "eq", "values": [user_tenant_id], }, ], "freeTextSearch": "", "order": [{"direction": "Desc", "field": "properties/creationContext/createdTime"}], # index service can return 100 results at most "pageSize": min(max_results, 100), "skip": 0, "includeTotalResultCount": True, "searchBuilder": "AppendPrefix", } # add flow filter to only list flows from current user if not include_others: payload["filters"].append( { "field": "properties/creationContext/createdBy/userObjectId", "operator": "eq", "values": [user_object_id], } ) endpoint = self._index_service_endpoint_url url = endpoint + "/entities" response = requests.post(url, headers=headers, json=payload) if response.status_code == 200: entities = json.loads(response.text) flow_entities = entities["value"] else: raise FlowOperationError( f"Failed to get flows from index service. Code: {response.status_code}, text: {response.text}" ) # transform to flow instances flow_instances = [] for entity in flow_entities: flow = Flow._from_index_service(entity) flow_instances.append(flow) return flow_instances def _download(self, source, dest): # TODO: support download flow raise NotImplementedError("Not implemented yet") def _resolve_arm_id_or_upload_dependencies(self, flow: Flow, ignore_tools_json=False) -> None: ops = OperationOrchestrator(self._all_operations, self._operation_scope, self._operation_config) # resolve flow's code self._try_resolve_code_for_flow(flow=flow, ops=ops, ignore_tools_json=ignore_tools_json) @classmethod def _try_resolve_code_for_flow(cls, flow: Flow, ops: OperationOrchestrator, ignore_tools_json=False) -> None: if flow.path: # remote path if flow.path.startswith("azureml://datastores"): flow._code_uploaded = True return else: raise ValueError("Path is required for flow.") with flow._build_code() as code: if code is None: return if flow._code_uploaded: return # TODO(2567532): backend does not fully support generate flow.tools.json from blob storage yet if not (Path(code.path) / PROMPT_FLOW_DIR_NAME / FLOW_TOOLS_JSON).exists(): generate_flow_tools_json(code.path) # ignore flow.tools.json if needed (e.g. 
for flow run scenario) if ignore_tools_json: ignore_file = code._ignore_file if isinstance(ignore_file, PromptflowIgnoreFile): ignore_file._ignore_tools_json = ignore_tools_json else: raise SystemErrorException( message=f"Flow code should have PromptflowIgnoreFile, got {type(ignore_file)}" ) # flow directory per file upload summary # as the upload logic locates in azure-ai-ml, we cannot touch during the upload # copy the logic here to print per file upload summary ignore_file = code._ignore_file upload_paths = [] source_path = Path(code.path).resolve() prefix = os.path.basename(source_path) + "/" for root, _, files in os.walk(source_path, followlinks=True): upload_paths += list( traverse_directory( root, files, prefix=prefix, ignore_file=ignore_file, ) ) ignore_files = code._ignore_file._get_ignore_list() for file_path in ignore_files: logger.debug(f"will ignore file: {file_path}...") for file_path, _ in upload_paths: logger.debug(f"will upload file: {file_path}...") code.datastore = WORKSPACE_LINKED_DATASTORE_NAME # NOTE: For flow directory upload, we prefer to upload it to the workspace linked datastore, # therefore we will directly use _check_and_upload_path, instead of v2 SDK public API # CodeOperations.create_or_update, as later one will upload the code asset to another # container in the storage account, which may fail with vnet for MT. # However, we might run into list secret permission error(especially in Heron workspace), # in this case, we will leverage v2 SDK public API, which has solution for Heron, # and request MT with the blob url; # refer to except block for more details. try: uploaded_code_asset, _ = _check_and_upload_path( artifact=code, asset_operations=ops._code_assets, artifact_type="Code", datastore_name=WORKSPACE_LINKED_DATASTORE_NAME, # actually not work at all show_progress=True, ) path = uploaded_code_asset.path path = path[path.find("LocalUpload") :] # path on container flow.code = path # azureml://datastores/workspaceblobstore/paths/<path-to-flow-dag-yaml> flow.path = SHORT_URI_FORMAT.format( WORKSPACE_LINKED_DATASTORE_NAME, (Path(path) / flow.path).as_posix() ) except HttpResponseError as e: # catch authorization error for list secret on datastore if "AuthorizationFailed" in str(e) and "datastores/listSecrets/action" in str(e): uploaded_code_asset = ops._code_assets.create_or_update(code) path = uploaded_code_asset.path path = path.replace(".blob.core.windows.net:443/", ".blob.core.windows.net/") # remove :443 port flow.code = path # https://<storage-account-name>.blob.core.windows.net/<container-name>/<path-to-flow-dag-yaml> flow.path = f"{path}/{flow.path}" else: raise flow._code_uploaded = True # region deprecated but keep for runtime test dependencies def _resolve_arm_id_or_upload_dependencies_to_file_share(self, flow: Flow) -> None: ops = OperationOrchestrator(self._all_operations, self._operation_scope, self._operation_config) # resolve flow's code self._try_resolve_code_for_flow_to_file_share(flow=flow, ops=ops) @classmethod def _try_resolve_code_for_flow_to_file_share(cls, flow: Flow, ops: OperationOrchestrator) -> None: from azure.ai.ml._utils._storage_utils import AzureMLDatastorePathUri from ._artifact_utilities import _check_and_upload_path if flow.path: if flow.path.startswith("azureml://datastores"): # remote path path_uri = AzureMLDatastorePathUri(flow.path) if path_uri.datastore != DEFAULT_STORAGE: raise ValueError(f"Only {DEFAULT_STORAGE} is supported as remote storage for now.") flow.path = path_uri.path flow._code_uploaded = True return else: 
raise ValueError("Path is required for flow.") with flow._build_code() as code: if code is None: return if flow._code_uploaded: return code.datastore = DEFAULT_STORAGE uploaded_code_asset = _check_and_upload_path( artifact=code, asset_operations=ops._code_assets, artifact_type="Code", show_progress=False, ) if "remote_path" in uploaded_code_asset: path = uploaded_code_asset["remote_path"] elif "remote path" in uploaded_code_asset: path = uploaded_code_asset["remote path"] flow.code = path flow.path = (Path(path) / flow.path).as_posix() flow._code_uploaded = True # endregion
0
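A hedged sketch (placeholder workspace values, and assuming the flow operations are exposed as `pf.flows` on the azure `PFClient`) of the public methods above: create a flow from a local folder, then get and list flows.

```python
# Hedged sketch; workspace values and the flow folder path are placeholders.
from azure.identity import DefaultAzureCredential

from promptflow.azure import PFClient

pf = PFClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
    resource_group_name="<resource-group>",
    workspace_name="<workspace-name>",
)

# create_or_update uploads the local folder to the workspace file share and registers the flow;
# display_name defaults to folder name + timestamp and type defaults to "standard" when omitted.
flow = pf.flows.create_or_update(flow="./web-classification", type="standard")

same_flow = pf.flows.get(name=flow.name)
my_flows = pf.flows.list(max_results=10, include_others=False)
for f in my_flows:
    print(f.name, f.type)
```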
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/_connection_operations.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from typing import Dict

from azure.ai.ml._scope_dependent_operations import (
    OperationConfig,
    OperationsContainer,
    OperationScope,
    _ScopeDependentOperations,
)

from promptflow._sdk._utils import safe_parse_object_list
from promptflow._sdk.entities._connection import _Connection
from promptflow._utils.logger_utils import get_cli_sdk_logger
from promptflow.azure._entities._workspace_connection_spec import WorkspaceConnectionSpec
from promptflow.azure._restclient.flow_service_caller import FlowServiceCaller

logger = get_cli_sdk_logger()


class ConnectionOperations(_ScopeDependentOperations):
    """ConnectionOperations.

    You should not instantiate this class directly. Instead, you should
    create a PFClient instance that instantiates it for you and
    attaches it as an attribute.
    """

    def __init__(
        self,
        operation_scope: OperationScope,
        operation_config: OperationConfig,
        all_operations: OperationsContainer,
        credential,
        service_caller: FlowServiceCaller,
        **kwargs: Dict,
    ):
        super(ConnectionOperations, self).__init__(operation_scope, operation_config)
        self._all_operations = all_operations
        self._service_caller = service_caller
        self._credential = credential

    def create_or_update(self, connection, **kwargs):
        rest_conn = connection._to_rest_object()

        # create flow draft
        rest_conn_result = self._service_caller.create_connection(
            subscription_id=self._operation_scope.subscription_id,
            resource_group_name=self._operation_scope.resource_group_name,
            workspace_name=self._operation_scope.workspace_name,
            connection_name=connection.name,
            body=rest_conn,
        )
        return _Connection._from_mt_rest_object(rest_conn_result)

    def get(self, name, **kwargs):
        rest_conn = self._service_caller.get_connection(
            subscription_id=self._operation_scope.subscription_id,
            resource_group_name=self._operation_scope.resource_group_name,
            workspace_name=self._operation_scope.workspace_name,
            connection_name=name,
            **kwargs,
        )

        return _Connection._from_mt_rest_object(rest_conn)

    def delete(self, name, **kwargs):
        return self._service_caller.delete_connection(
            subscription_id=self._operation_scope.subscription_id,
            resource_group_name=self._operation_scope.resource_group_name,
            workspace_name=self._operation_scope.workspace_name,
            connection_name=name,
            **kwargs,
        )

    def list(self, **kwargs):
        rest_connections = self._service_caller.list_connections(
            subscription_id=self._operation_scope.subscription_id,
            resource_group_name=self._operation_scope.resource_group_name,
            workspace_name=self._operation_scope.workspace_name,
            **kwargs,
        )

        return safe_parse_object_list(
            obj_list=rest_connections,
            parser=_Connection._from_mt_rest_object,
            message_generator=lambda x: f"Failed to load connection {x.connection_name}, skipped.",
        )

    def list_connection_specs(self, **kwargs):
        results = self._service_caller.list_connection_specs(
            subscription_id=self._operation_scope.subscription_id,
            resource_group_name=self._operation_scope.resource_group_name,
            workspace_name=self._operation_scope.workspace_name,
            **kwargs,
        )
        return [WorkspaceConnectionSpec._from_rest_object(spec) for spec in results]

0
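A short sketch of the workspace connection operations above. The `pf._connections` attribute name is an assumption for illustration (the azure client may expose these operations under a different attribute); it reuses an azure `PFClient` instance as in the earlier sketches.

```python
# Hedged sketch: assumes an existing azure PFClient `pf`, and that its ConnectionOperations
# instance is reachable as `pf._connections` (this attribute name is an assumption).
conn = pf._connections.get(name="my_azure_open_ai_connection")  # hypothetical connection name
print(conn.name, type(conn).__name__)

for spec in pf._connections.list_connection_specs():
    print(spec.connection_type, spec.flow_value_type)
```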
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/_artifact_utilities.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- # pylint: disable=protected-access import os import uuid from datetime import datetime, timedelta from pathlib import Path from typing import Dict, Optional, TypeVar, Union from azure.ai.ml._artifacts._blob_storage_helper import BlobStorageClient from azure.ai.ml._artifacts._gen2_storage_helper import Gen2StorageClient from azure.ai.ml._azure_environments import _get_storage_endpoint_from_metadata from azure.ai.ml._restclient.v2022_10_01.models import DatastoreType from azure.ai.ml._scope_dependent_operations import OperationScope from azure.ai.ml._utils._arm_id_utils import ( AMLNamedArmId, get_resource_name_from_arm_id, is_ARM_id_for_resource, remove_aml_prefix, ) from azure.ai.ml._utils._asset_utils import ( IgnoreFile, _build_metadata_dict, _validate_path, get_ignore_file, get_object_hash, ) from azure.ai.ml._utils._storage_utils import ( AzureMLDatastorePathUri, get_artifact_path_from_storage_url, get_storage_client, ) from azure.ai.ml.constants._common import SHORT_URI_FORMAT, STORAGE_ACCOUNT_URLS from azure.ai.ml.entities import Environment from azure.ai.ml.entities._assets._artifacts.artifact import Artifact, ArtifactStorageInfo from azure.ai.ml.entities._credentials import AccountKeyConfiguration from azure.ai.ml.entities._datastore._constants import WORKSPACE_BLOB_STORE from azure.ai.ml.exceptions import ErrorTarget, ValidationException from azure.ai.ml.operations._datastore_operations import DatastoreOperations from azure.storage.blob import BlobSasPermissions, generate_blob_sas from azure.storage.filedatalake import FileSasPermissions, generate_file_sas from ..._utils.logger_utils import LoggerFactory from ._fileshare_storeage_helper import FlowFileStorageClient module_logger = LoggerFactory.get_logger(__name__) def _get_datastore_name(*, datastore_name: Optional[str] = WORKSPACE_BLOB_STORE) -> str: datastore_name = WORKSPACE_BLOB_STORE if not datastore_name else datastore_name try: datastore_name = get_resource_name_from_arm_id(datastore_name) except (ValueError, AttributeError, ValidationException): module_logger.debug("datastore_name %s is not a full arm id. 
Proceed with a shortened name.\n", datastore_name) datastore_name = remove_aml_prefix(datastore_name) if is_ARM_id_for_resource(datastore_name): datastore_name = get_resource_name_from_arm_id(datastore_name) return datastore_name def get_datastore_info(operations: DatastoreOperations, name: str) -> Dict[str, str]: """Get datastore account, type, and auth information.""" datastore_info = {} if name: datastore = operations.get(name, include_secrets=True) else: datastore = operations.get_default(include_secrets=True) storage_endpoint = _get_storage_endpoint_from_metadata() credentials = datastore.credentials datastore_info["storage_type"] = datastore.type datastore_info["storage_account"] = datastore.account_name datastore_info["account_url"] = STORAGE_ACCOUNT_URLS[datastore.type].format( datastore.account_name, storage_endpoint ) if isinstance(credentials, AccountKeyConfiguration): datastore_info["credential"] = credentials.account_key else: try: datastore_info["credential"] = credentials.sas_token except Exception as e: # pylint: disable=broad-except if not hasattr(credentials, "sas_token"): datastore_info["credential"] = operations._credential else: raise e if datastore.type == DatastoreType.AZURE_BLOB: datastore_info["container_name"] = str(datastore.container_name) elif datastore.type == DatastoreType.AZURE_DATA_LAKE_GEN2: datastore_info["container_name"] = str(datastore.filesystem) elif datastore.type == DatastoreType.AZURE_FILE: datastore_info["container_name"] = str(datastore.file_share_name) else: raise Exception( f"Datastore type {datastore.type} is not supported for uploads. " f"Supported types are {DatastoreType.AZURE_BLOB} and {DatastoreType.AZURE_DATA_LAKE_GEN2}." ) return datastore_info def list_logs_in_datastore(ds_info: Dict[str, str], prefix: str, legacy_log_folder_name: str) -> Dict[str, str]: """Returns a dictionary of file name to blob or data lake uri with SAS token, matching the structure of RunDetails.logFiles. 
legacy_log_folder_name: the name of the folder in the datastore that contains the logs /azureml-logs/*.txt is the legacy log structure for commandJob and sweepJob /logs/azureml/*.txt is the legacy log structure for pipeline parent Job """ if ds_info["storage_type"] not in [ DatastoreType.AZURE_BLOB, DatastoreType.AZURE_DATA_LAKE_GEN2, ]: raise Exception("Only Blob and Azure DataLake Storage Gen2 datastores are supported.") storage_client = get_storage_client( credential=ds_info["credential"], container_name=ds_info["container_name"], storage_account=ds_info["storage_account"], storage_type=ds_info["storage_type"], ) items = storage_client.list(starts_with=prefix + "/user_logs/") # Append legacy log files if present items.extend(storage_client.list(starts_with=prefix + legacy_log_folder_name)) log_dict = {} for item_name in items: sub_name = item_name.split(prefix + "/")[1] if isinstance(storage_client, BlobStorageClient): token = generate_blob_sas( account_name=ds_info["storage_account"], container_name=ds_info["container_name"], blob_name=item_name, account_key=ds_info["credential"], permission=BlobSasPermissions(read=True), expiry=datetime.utcnow() + timedelta(minutes=30), ) elif isinstance(storage_client, Gen2StorageClient): token = generate_file_sas( # pylint: disable=no-value-for-parameter account_name=ds_info["storage_account"], file_system_name=ds_info["container_name"], file_name=item_name, credential=ds_info["credential"], permission=FileSasPermissions(read=True), expiry=datetime.utcnow() + timedelta(minutes=30), ) log_dict[sub_name] = "{}/{}/{}?{}".format(ds_info["account_url"], ds_info["container_name"], item_name, token) return log_dict def _get_default_datastore_info(datastore_operation): return get_datastore_info(datastore_operation, None) def upload_artifact( local_path: str, datastore_operation: DatastoreOperations, operation_scope: OperationScope, datastore_name: Optional[str], asset_hash: Optional[str] = None, show_progress: bool = True, asset_name: Optional[str] = None, asset_version: Optional[str] = None, ignore_file: IgnoreFile = IgnoreFile(None), sas_uri=None, ) -> ArtifactStorageInfo: """Upload local file or directory to datastore.""" if sas_uri: storage_client = get_storage_client(credential=None, storage_account=None, account_url=sas_uri) else: datastore_name = _get_datastore_name(datastore_name=datastore_name) datastore_info = get_datastore_info(datastore_operation, datastore_name) storage_client = FlowFileStorageClient( credential=datastore_info["credential"], file_share_name=datastore_info["container_name"], account_url=datastore_info["account_url"], azure_cred=datastore_operation._credential, ) artifact_info = storage_client.upload( local_path, asset_hash=asset_hash, show_progress=show_progress, name=asset_name, version=asset_version, ignore_file=ignore_file, ) artifact_info["remote path"] = os.path.join( storage_client.directory_client.directory_path, artifact_info["remote path"] ) return artifact_info def download_artifact( starts_with: Union[str, os.PathLike], destination: str, datastore_operation: DatastoreOperations, datastore_name: Optional[str], datastore_info: Optional[Dict] = None, ) -> str: """Download datastore path to local file or directory. 
:param Union[str, os.PathLike] starts_with: Prefix of blobs to download :param str destination: Path that files will be written to :param DatastoreOperations datastore_operation: Datastore operations :param Optional[str] datastore_name: name of datastore :param Dict datastore_info: the return value of invoking get_datastore_info :return str: Path that files were written to """ starts_with = starts_with.as_posix() if isinstance(starts_with, Path) else starts_with datastore_name = _get_datastore_name(datastore_name=datastore_name) if datastore_info is None: datastore_info = get_datastore_info(datastore_operation, datastore_name) storage_client = get_storage_client(**datastore_info) storage_client.download(starts_with=starts_with, destination=destination) return destination def download_artifact_from_storage_url( blob_url: str, destination: str, datastore_operation: DatastoreOperations, datastore_name: Optional[str], ) -> str: """Download datastore blob URL to local file or directory.""" datastore_name = _get_datastore_name(datastore_name=datastore_name) datastore_info = get_datastore_info(datastore_operation, datastore_name) starts_with = get_artifact_path_from_storage_url( blob_url=str(blob_url), container_name=datastore_info.get("container_name") ) return download_artifact( starts_with=starts_with, destination=destination, datastore_operation=datastore_operation, datastore_name=datastore_name, datastore_info=datastore_info, ) def download_artifact_from_aml_uri(uri: str, destination: str, datastore_operation: DatastoreOperations): """Downloads artifact pointed to by URI of the form `azureml://...` to destination. :param str uri: AzureML uri of artifact to download :param str destination: Path to download artifact to :param DatastoreOperations datastore_operation: datastore operations :return str: Path that files were downloaded to """ parsed_uri = AzureMLDatastorePathUri(uri) return download_artifact( starts_with=parsed_uri.path, destination=destination, datastore_operation=datastore_operation, datastore_name=parsed_uri.datastore, ) def aml_datastore_path_exists( uri: str, datastore_operation: DatastoreOperations, datastore_info: Optional[dict] = None ): """Checks whether `uri` of the form "azureml://" points to either a directory or a file. 
:param str uri: azure ml datastore uri :param DatastoreOperations datastore_operation: Datastore operation :param dict datastore_info: return value of get_datastore_info """ parsed_uri = AzureMLDatastorePathUri(uri) datastore_info = datastore_info or get_datastore_info(datastore_operation, parsed_uri.datastore) return get_storage_client(**datastore_info).exists(parsed_uri.path) def _upload_to_datastore( operation_scope: OperationScope, datastore_operation: DatastoreOperations, path: Union[str, Path, os.PathLike], artifact_type: str, datastore_name: Optional[str] = None, show_progress: bool = True, asset_name: Optional[str] = None, asset_version: Optional[str] = None, asset_hash: Optional[str] = None, ignore_file: Optional[IgnoreFile] = None, sas_uri: Optional[str] = None, # contains registry sas url ) -> ArtifactStorageInfo: _validate_path(path, _type=artifact_type) if not ignore_file: ignore_file = get_ignore_file(path) if not asset_hash: asset_hash = get_object_hash(path, ignore_file) artifact = upload_artifact( str(path), datastore_operation, operation_scope, datastore_name, show_progress=show_progress, asset_hash=asset_hash, asset_name=asset_name, asset_version=asset_version, ignore_file=ignore_file, sas_uri=sas_uri, ) return artifact def _upload_and_generate_remote_uri( operation_scope: OperationScope, datastore_operation: DatastoreOperations, path: Union[str, Path, os.PathLike], artifact_type: str = ErrorTarget.ARTIFACT, datastore_name: str = WORKSPACE_BLOB_STORE, show_progress: bool = True, ) -> str: # Asset name is required for uploading to a datastore asset_name = str(uuid.uuid4()) artifact_info = _upload_to_datastore( operation_scope=operation_scope, datastore_operation=datastore_operation, path=path, datastore_name=datastore_name, asset_name=asset_name, artifact_type=artifact_type, show_progress=show_progress, ) path = artifact_info.relative_path datastore = AMLNamedArmId(artifact_info.datastore_arm_id).asset_name return SHORT_URI_FORMAT.format(datastore, path) def _update_metadata(name, version, indicator_file, datastore_info) -> None: storage_client = get_storage_client(**datastore_info) if isinstance(storage_client, BlobStorageClient): _update_blob_metadata(name, version, indicator_file, storage_client) elif isinstance(storage_client, Gen2StorageClient): _update_gen2_metadata(name, version, indicator_file, storage_client) def _update_blob_metadata(name, version, indicator_file, storage_client) -> None: container_client = storage_client.container_client if indicator_file.startswith(storage_client.container): indicator_file = indicator_file.split(storage_client.container)[1] blob = container_client.get_blob_client(blob=indicator_file) blob.set_blob_metadata(_build_metadata_dict(name=name, version=version)) def _update_gen2_metadata(name, version, indicator_file, storage_client) -> None: artifact_directory_client = storage_client.file_system_client.get_directory_client(indicator_file) artifact_directory_client.set_metadata(_build_metadata_dict(name=name, version=version)) T = TypeVar("T", bound=Artifact) def _check_and_upload_path( artifact: T, asset_operations: Union["DataOperations", "ModelOperations", "CodeOperations", "FeatureSetOperations"], artifact_type: str, datastore_name: Optional[str] = None, sas_uri: Optional[str] = None, show_progress: bool = True, ): """Checks whether `artifact` is a path or a uri and uploads it to the datastore if necessary. 
param T artifact: artifact to check and upload param Union["DataOperations", "ModelOperations", "CodeOperations"] asset_operations: the asset operations to use for uploading param str datastore_name: the name of the datastore to upload to param str sas_uri: the sas uri to use for uploading """ from azure.ai.ml._utils.utils import is_mlflow_uri, is_url datastore_name = artifact.datastore if ( hasattr(artifact, "local_path") and artifact.local_path is not None or ( hasattr(artifact, "path") and artifact.path is not None and not (is_url(artifact.path) or is_mlflow_uri(artifact.path)) ) ): path = ( Path(artifact.path) if hasattr(artifact, "path") and artifact.path is not None else Path(artifact.local_path) ) if not path.is_absolute(): path = Path(artifact.base_path, path).resolve() uploaded_artifact = _upload_to_datastore( asset_operations._operation_scope, asset_operations._datastore_operation, path, datastore_name=datastore_name, asset_name=artifact.name, asset_version=str(artifact.version), asset_hash=artifact._upload_hash if hasattr(artifact, "_upload_hash") else None, sas_uri=sas_uri, artifact_type=artifact_type, show_progress=show_progress, ignore_file=getattr(artifact, "_ignore_file", None), ) return uploaded_artifact def _check_and_upload_env_build_context( environment: Environment, operations: "EnvironmentOperations", sas_uri=None, show_progress: bool = True, ) -> Environment: if environment.path: uploaded_artifact = _upload_to_datastore( operations._operation_scope, operations._datastore_operation, environment.path, asset_name=environment.name, asset_version=str(environment.version), asset_hash=environment._upload_hash, sas_uri=sas_uri, artifact_type=ErrorTarget.ENVIRONMENT, datastore_name=environment.datastore, show_progress=show_progress, ) # TODO: Depending on decision trailing "/" needs to stay or not. EMS requires it to be present environment.build.path = uploaded_artifact.full_storage_path + "/" return environment
0
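A small sketch of the artifact download helper above, using azure-ai-ml's `MLClient.datastores` to obtain the `DatastoreOperations` it expects; the workspace values and the `azureml://` URI are placeholders.

```python
# Hedged sketch; all identifiers and the datastore path are hypothetical.
from azure.ai.ml import MLClient
from azure.identity import DefaultAzureCredential

from promptflow.azure.operations._artifact_utilities import download_artifact_from_aml_uri

ml_client = MLClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
    resource_group_name="<resource-group>",
    workspace_name="<workspace-name>",
)

# Downloads everything under the datastore path prefix to the local destination folder.
local_path = download_artifact_from_aml_uri(
    uri="azureml://datastores/workspaceblobstore/paths/LocalUpload/abc123/flow_artifacts",
    destination="./downloaded_artifacts",
    datastore_operation=ml_client.datastores,
)
print(local_path)
```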
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_ml/__init__.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
"""
This file stores functions and objects that will be used in prompt-flow sdk.
DO NOT change the module names in "all" list, add new modules if needed.
"""


class _DummyCallableClassForLazyImportError:
    """This class is used to put off ImportError until the imported class or function is called."""

    @classmethod
    def _get_message(cls):
        return "azure-ai-ml is not installed. Please install azure-ai-ml to use this feature."

    def __init__(self, *args, **kwargs):
        raise ImportError(self._get_message())

    def __call__(self, *args, **kwargs):
        raise ImportError(self._get_message())


# TODO: avoid import azure.ai.ml if promptflow.azure.configure is not called
try:
    from azure.ai.ml import MLClient, load_component
    from azure.ai.ml.entities import Component
    from azure.ai.ml.entities._assets import Code
    from azure.ai.ml.entities._component._additional_includes import AdditionalIncludesMixin
    from azure.ai.ml.entities._load_functions import load_common
except ImportError:

    class load_component(_DummyCallableClassForLazyImportError):
        pass

    class Component(_DummyCallableClassForLazyImportError):
        pass

    class MLClient(_DummyCallableClassForLazyImportError):
        pass

    class load_common(_DummyCallableClassForLazyImportError):
        pass

    class Code(_DummyCallableClassForLazyImportError):
        pass

    class AdditionalIncludesMixin(_DummyCallableClassForLazyImportError):
        pass


__all__ = [
    "load_component",
    "Component",
    "MLClient",
    "load_common",
    "Code",
    "AdditionalIncludesMixin",
]
0
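The deferred-ImportError pattern above, shown in isolation as a self-contained sketch: importing the module always succeeds, and the error only surfaces when the placeholder class is instantiated or called. The optional package name is hypothetical.

```python
# Self-contained sketch of the lazy ImportError pattern; "some_optional_package" is hypothetical.
class _DummyCallableClassForLazyImportError:
    @classmethod
    def _get_message(cls):
        return "some-optional-package is not installed. Please install it to use this feature."

    def __init__(self, *args, **kwargs):
        raise ImportError(self._get_message())

    def __call__(self, *args, **kwargs):
        raise ImportError(self._get_message())


try:
    from some_optional_package import SomeClient  # hypothetical optional dependency
except ImportError:

    class SomeClient(_DummyCallableClassForLazyImportError):
        pass


# Importing succeeds either way; the error is raised only on actual use.
try:
    SomeClient()
except ImportError as e:
    print(e)
```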
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_schemas/__init__.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_schemas/_flow_schema.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import json
from pathlib import Path

from azure.ai.ml._schema import UnionField, YamlFileSchema
from azure.ai.ml._schema.core.fields import LocalPathField
from marshmallow import fields, post_load

from promptflow._utils.logger_utils import LoggerFactory

module_logger = LoggerFactory.get_logger(__name__)


class FlowSchema(YamlFileSchema):
    name = fields.Str(attribute="name")
    id = fields.Str(attribute="id")
    description = fields.Str(attribute="description")
    tags = fields.Dict(keys=fields.Str, attribute="tags")
    path = UnionField(
        [
            LocalPathField(),
            fields.Str(),
        ],
    )
    display_name = fields.Str(attribute="display_name")
    type = fields.Str(attribute="type")
    properties = fields.Dict(keys=fields.Str, attribute="properties")

    @post_load
    def update_properties(self, dct, **kwargs):
        folder = Path(self.context["base_path"])

        flow_type = dct.get("type")
        if flow_type:
            mapping = {
                "standard": "default",
                "evaluate": "evaluation",
            }
            dct["type"] = mapping[flow_type]

        properties = dct.get("properties")
        if properties and "promptflow.batch_inputs" in properties:
            input_path = properties["promptflow.batch_inputs"]
            samples_file = folder / input_path
            if samples_file.exists():
                with open(samples_file, "r", encoding="utf-8") as fp:
                    properties["promptflow.batch_inputs"] = json.loads(fp.read())

        return dct
0
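A self-contained sketch of the `post_load` remapping idea used by `FlowSchema` above, using plain marshmallow instead of the azure-ai-ml `YamlFileSchema` machinery (the base-path context and batch-inputs loading are omitted).

```python
# Minimal marshmallow sketch of the post_load type remapping; MiniFlowSchema is hypothetical.
from marshmallow import Schema, fields, post_load


class MiniFlowSchema(Schema):
    type = fields.Str()

    @post_load
    def map_type(self, dct, **kwargs):
        # client-side flow types are translated to the service-side names
        mapping = {"standard": "default", "evaluate": "evaluation"}
        flow_type = dct.get("type")
        if flow_type:
            dct["type"] = mapping[flow_type]
        return dct


print(MiniFlowSchema().load({"type": "evaluate"}))  # {'type': 'evaluation'}
```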
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_constants/_flow.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------


class FlowType:
    STANDARD = "standard"
    CHAT = "chat"
    EVALUATION = "evaluate"


class FlowJobType:
    STANDARD = "azureml.promptflow.FlowRun"
    EVALUATION = "azureml.promptflow.EvaluationRun"


# Use this storage since it's the storage used by notebook
DEFAULT_STORAGE = "workspaceworkingdirectory"

PROMPTFLOW_FILE_SHARE_DIR = "promptflow"

CLOUD_RUNS_PAGE_SIZE = 25  # align with UX

SESSION_CREATION_TIMEOUT_SECONDS = 10 * 60  # 10 minutes
SESSION_CREATION_TIMEOUT_ENV_VAR = "PROMPTFLOW_SESSION_CREATION_TIMEOUT_SECONDS"

ENVIRONMENT = "environment"
PYTHON_REQUIREMENTS_TXT = "python_requirements_txt"

ADDITIONAL_INCLUDES = "additional_includes"
BASE_IMAGE = "image"

AUTOMATIC_RUNTIME_NAME = "automatic"
AUTOMATIC_RUNTIME = "automatic runtime"
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_constants/_component.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from pathlib import Path

RESOURCE_FOLDER = Path(__file__).parent.parent / "resources"
COMMAND_COMPONENT_SPEC_TEMPLATE = RESOURCE_FOLDER / "component_spec_template.yaml"
DEFAULT_PYTHON_VERSION = "3.9"
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_constants/__init__.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore

from ._component import COMMAND_COMPONENT_SPEC_TEMPLATE, DEFAULT_PYTHON_VERSION
from ._flow import FlowJobType, FlowType

__all__ = ["FlowJobType", "FlowType", "DEFAULT_PYTHON_VERSION", "COMMAND_COMPONENT_SPEC_TEMPLATE"]
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_entities/_flow.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import copy import os.path from contextlib import contextmanager from os import PathLike from pathlib import Path from typing import Dict, List, Optional, Union import pydash from promptflow._sdk._constants import DAG_FILE_NAME, SERVICE_FLOW_TYPE_2_CLIENT_FLOW_TYPE, AzureFlowSource, FlowType from promptflow.azure._ml import AdditionalIncludesMixin, Code from ..._sdk._utils import PromptflowIgnoreFile, load_yaml, remove_empty_element_from_dict from ..._utils.flow_utils import dump_flow_dag, load_flow_dag from ..._utils.logger_utils import LoggerFactory from .._constants._flow import ADDITIONAL_INCLUDES, DEFAULT_STORAGE, ENVIRONMENT, PYTHON_REQUIREMENTS_TXT from .._restclient.flow.models import FlowDto # pylint: disable=redefined-builtin, unused-argument, f-string-without-interpolation logger = LoggerFactory.get_logger(__name__) class Flow(AdditionalIncludesMixin): DEFAULT_REQUIREMENTS_FILE_NAME = "requirements.txt" def __init__( self, path: Union[str, PathLike], name: Optional[str] = None, type: Optional[str] = None, description: Optional[str] = None, tags: Optional[Dict[str, str]] = None, **kwargs, ): self._flow_source = kwargs.pop("flow_source", AzureFlowSource.LOCAL) self.path = path self.name = name self.type = type or FlowType.STANDARD self.display_name = kwargs.get("display_name", None) or name self.description = description self.tags = tags self.owner = kwargs.get("owner", None) self.is_archived = kwargs.get("is_archived", None) self.created_date = kwargs.get("created_date", None) self.flow_portal_url = kwargs.get("flow_portal_url", None) if self._flow_source == AzureFlowSource.LOCAL: absolute_path = self._validate_flow_from_source(path) # flow snapshot folder self.code = absolute_path.parent.as_posix() self._code_uploaded = False self.path = absolute_path.name self._flow_dict = self._load_flow_yaml(absolute_path) self.display_name = self.display_name or absolute_path.parent.name self.description = description or self._flow_dict.get("description", None) self.tags = tags or self._flow_dict.get("tags", None) elif self._flow_source == AzureFlowSource.PF_SERVICE: self.code = kwargs.get("flow_resource_id", None) elif self._flow_source == AzureFlowSource.INDEX: self.code = kwargs.get("entity_id", None) def _validate_flow_from_source(self, source: Union[str, PathLike]) -> Path: """Validate flow from source. :param source: The source of the flow. :type source: Union[str, PathLike] """ absolute_path = Path(source).resolve().absolute() if absolute_path.is_dir(): absolute_path = absolute_path / DAG_FILE_NAME if not absolute_path.exists(): raise ValueError(f"Flow file {absolute_path.as_posix()} does not exist.") return absolute_path def _load_flow_yaml(self, path: Union[str, Path]) -> Dict: """Load flow yaml file. :param path: The path of the flow yaml file. :type path: str """ return load_yaml(path) @classmethod def _resolve_requirements(cls, flow_path: Union[str, Path], flow_dag: dict): """If requirements.txt exists, add it to the flow snapshot. Return True if flow_dag is updated.""" flow_dir = Path(flow_path) if not (flow_dir / cls.DEFAULT_REQUIREMENTS_FILE_NAME).exists(): return False if pydash.get(flow_dag, f"{ENVIRONMENT}.{PYTHON_REQUIREMENTS_TXT}"): return False logger.debug( f"requirements.txt is found in the flow folder: {flow_path.resolve().as_posix()}, " "adding it to flow.dag.yaml." 
) pydash.set_(flow_dag, f"{ENVIRONMENT}.{PYTHON_REQUIREMENTS_TXT}", cls.DEFAULT_REQUIREMENTS_FILE_NAME) return True @classmethod def _remove_additional_includes(cls, flow_dag: dict): """Remove additional includes from flow dag. Return True if removed.""" if ADDITIONAL_INCLUDES not in flow_dag: return False logger.debug("Additional includes are found in the flow dag, removing them from flow.dag.yaml after resolved.") flow_dag.pop(ADDITIONAL_INCLUDES, None) return True # region AdditionalIncludesMixin @contextmanager def _try_build_local_code(self) -> Optional[Code]: """Try to create a Code object pointing to local code and yield it. If there is no local code to upload, yield None. Otherwise, yield a Code object pointing to the code. """ with super()._try_build_local_code() as code: dag_updated = False if isinstance(code, Code): flow_dir = Path(code.path) _, flow_dag = load_flow_dag(flow_path=flow_dir) original_flow_dag = copy.deepcopy(flow_dag) if self._get_all_additional_includes_configs(): # Remove additional include in the flow yaml. dag_updated = self._remove_additional_includes(flow_dag) # promptflow snapshot has specific ignore logic, like it should ignore `.run` by default code._ignore_file = PromptflowIgnoreFile(flow_dir) # promptflow snapshot will always be uploaded to default storage code.datastore = DEFAULT_STORAGE dag_updated = self._resolve_requirements(flow_dir, flow_dag) or dag_updated if dag_updated: dump_flow_dag(flow_dag, flow_dir) try: yield code finally: if dag_updated: dump_flow_dag(original_flow_dag, flow_dir) def _get_base_path_for_code(self) -> Path: """Get base path for additional includes.""" # note that self.code is an absolute path, so it is safe to use it as base path return Path(self.code) def _get_all_additional_includes_configs(self) -> List: """Get all additional include configs. For flow, its additional include need to be read from dag with a helper function. 
""" from promptflow._sdk._utils import _get_additional_includes return _get_additional_includes(os.path.join(self.code, self.path)) # endregion @classmethod def _from_pf_service(cls, rest_object: FlowDto): return cls( flow_source=AzureFlowSource.PF_SERVICE, path=rest_object.flow_definition_file_path, name=rest_object.flow_id, type=SERVICE_FLOW_TYPE_2_CLIENT_FLOW_TYPE[str(rest_object.flow_type).lower()], description=rest_object.description, tags=rest_object.tags, display_name=rest_object.flow_name, flow_resource_id=rest_object.flow_resource_id, owner=rest_object.owner.as_dict(), is_archived=rest_object.is_archived, created_date=rest_object.created_date, flow_portal_url=rest_object.studio_portal_endpoint, ) @classmethod def _from_index_service(cls, rest_object: Dict): properties = rest_object["properties"] annotations = rest_object["annotations"] flow_type = properties.get("flowType", None).lower() # rag type flow is shown as standard flow in UX, not sure why this type exists in service code if flow_type == "rag": flow_type = FlowType.STANDARD elif flow_type: flow_type = SERVICE_FLOW_TYPE_2_CLIENT_FLOW_TYPE[flow_type] return cls( flow_source=AzureFlowSource.INDEX, path=properties.get("flowDefinitionFilePath", None), name=properties.get("flowId", None), display_name=annotations.get("flowName", None), type=flow_type, description=annotations.get("description", None), tags=annotations.get("tags", None), entity_id=rest_object["entityId"], owner=annotations.get("owner", None), is_archived=annotations.get("isArchived", None), created_date=annotations.get("createdDate", None), ) def _to_dict(self): result = { "name": self.name, "type": self.type, "description": self.description, "tags": self.tags, "path": self.path, "code": str(self.code), "display_name": self.display_name, "owner": self.owner, "is_archived": self.is_archived, "created_date": str(self.created_date), "flow_portal_url": self.flow_portal_url, } return remove_empty_element_from_dict(result)
0
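The `_try_build_local_code` override above temporarily rewrites `flow.dag.yaml` (removing additional includes and pointing the environment at a requirements file) before the snapshot is uploaded, then restores the original content in the `finally` block. Below is a minimal, generic sketch of that restore-on-exit pattern under stated assumptions; it is not the promptflow implementation — the helper name `_temporarily_updated_dag` and the PyYAML-based load/dump are illustrative choices only.

```python
# Sketch only: temporarily apply an update to a YAML file, restore it on exit.
# Assumes PyYAML is available; _temporarily_updated_dag is a hypothetical helper.
import copy
from contextlib import contextmanager
from pathlib import Path

import yaml


@contextmanager
def _temporarily_updated_dag(dag_path: Path, update):
    """Apply `update` to the DAG file on disk, then restore the original content on exit."""
    original = yaml.safe_load(dag_path.read_text())
    updated = update(copy.deepcopy(original))
    dag_updated = updated != original
    if dag_updated:
        dag_path.write_text(yaml.safe_dump(updated))
    try:
        yield dag_path
    finally:
        if dag_updated:
            # The caller sees the updated file only inside the `with` block.
            dag_path.write_text(yaml.safe_dump(original))


# Usage sketch: strip a key before upload, original file is restored afterwards.
# with _temporarily_updated_dag(Path("flow.dag.yaml"), lambda d: {k: v for k, v in d.items() if k != "additional_includes"}):
#     upload_snapshot()  # hypothetical upload step
```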
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_entities/__init__.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_entities/_workspace_connection_spec.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from dataclasses import asdict, dataclass

from promptflow.azure._restclient.flow.models import ConnectionConfigSpec as RestConnectionConfigSpec
from promptflow.azure._restclient.flow.models import WorkspaceConnectionSpec as RestWorkspaceConnectionSpec


@dataclass
class ConnectionConfigSpec:
    name: str
    display_name: str
    config_value_type: str
    default_value: str = None
    description: str = None
    enum_values: list = None
    is_optional: bool = False

    @classmethod
    def _from_rest_object(cls, rest_obj: RestConnectionConfigSpec):
        return cls(
            name=rest_obj.name,
            display_name=rest_obj.display_name,
            config_value_type=rest_obj.config_value_type,
            default_value=rest_obj.default_value,
            description=rest_obj.description,
            enum_values=rest_obj.enum_values,
            is_optional=rest_obj.is_optional,
        )

    def _to_dict(self):
        return asdict(self, dict_factory=lambda x: {k: v for (k, v) in x if v is not None})


@dataclass
class WorkspaceConnectionSpec:
    module: str
    connection_type: str  # Connection type example: AzureOpenAI
    flow_value_type: str  # Flow value type is the input.type on node, example: AzureOpenAIConnection
    config_specs: list = None

    @classmethod
    def _from_rest_object(cls, rest_obj: RestWorkspaceConnectionSpec):
        return cls(
            config_specs=[
                ConnectionConfigSpec._from_rest_object(config_spec)
                for config_spec in (rest_obj.config_specs or [])
            ],
            module=rest_obj.module,
            connection_type=rest_obj.connection_type,
            flow_value_type=rest_obj.flow_value_type,
        )

    def _to_dict(self):
        return asdict(self, dict_factory=lambda x: {k: v for (k, v) in x if v is not None})
0
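Both `_to_dict` methods above rely on the `dict_factory` hook of `dataclasses.asdict` to drop `None`-valued fields from the serialized result. A tiny standalone illustration of that pattern follows; `ExampleSpec` is a hypothetical class used only for the demo, not part of promptflow.

```python
# Standalone illustration of filtering None values via asdict's dict_factory.
from dataclasses import asdict, dataclass


@dataclass
class ExampleSpec:  # hypothetical example class
    name: str
    description: str = None


# dict_factory receives the list of (field, value) pairs for each dataclass level,
# so None-valued fields are simply omitted from the resulting dict.
print(asdict(ExampleSpec(name="aoai"), dict_factory=lambda x: {k: v for (k, v) in x if v is not None}))
# -> {'name': 'aoai'}
```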
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/README.md
# How to automatically generate the REST client code

The REST client code in this folder is not written by hand; it is generated by autorest.

## Setup
+ install [nodejs](https://nodejs.org/en)
+ install autorest
  + run `npm install -g autorest`

## Download swagger.json

Download swagger.json from [here](https://int.api.azureml-test.ms/flow/swagger/v1.0/swagger.json) to [promptflow/azure/_restclient](../promptflow/azure/_restclient)

## Update code

+ cd to [promptflow/azure/_restclient](../promptflow/azure/_restclient)
+ run `autorest --v3 --python --track2 --version=3.8.0 --use=@autorest/[email protected] --input-file=swagger.json --output-folder=. --namespace=flow --modelerfour.lenient-model-deduplication`
+ don't change `--use`: the latest version of `autorest/python` generates code following a different pattern, which is not compatible with our code.

## Update the generation history

- 2023.11.13 - [Update SDK restclient](https://github.com/microsoft/promptflow/pull/1101).
- 2023.12.18 - [Remove data portal url from the result of pfazure run show](https://github.com/microsoft/promptflow/pull/1497)

## Troubleshooting

### Duplicate object schemas with "xxx" name detected.

This may be caused by duplicate generated class names.

```json
"FlowFeature": {
    "type": "object",
    "properties": {
        "name": {
            "type": "string",
            "nullable": true
        },
        "description": {
            "type": "string",
            "nullable": true
        },
        "state": {
            "type": "object",
            "properties": {
                "Runtime": {
                    "$ref": "#/components/schemas/FlowFeatureState"
                },
                "Executor": {
                    "$ref": "#/components/schemas/FlowFeatureState"
                },
                "PFS": {
                    "$ref": "#/components/schemas/FlowFeatureState"
                }
            },
            "additionalProperties": false,
            "nullable": true
        }
    },
    "additionalProperties": false
},
"FlowFeatureState": {
    "enum": [
        "Ready",
        "E2ETest"
    ],
    "type": "string"
},
```

`FlowFeature` has a nested object field `state`, which will be generated to a new class named `FlowFeatureState`, and it duplicates with the enum `FlowFeatureState`. To fix this, the server side needs to change the class name in the schema; in this case, the server side renamed the object `state` to `states` and the problem was resolved.
0
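After regenerating the client per the README steps above, a quick import smoke test can confirm that the generated package still exposes the models the hand-written SDK wrappers import. This is a hedged sketch: the module path is taken from `_workspace_connection_spec.py` earlier in this repo, and it is an assumption that the path and model names survive regeneration unchanged.

```python
# Hedged smoke test after regeneration: the models below are the ones imported
# by the hand-written wrappers; adjust if the generated layout differs.
from promptflow.azure._restclient.flow.models import (
    ConnectionConfigSpec,
    WorkspaceConnectionSpec,
)

print(ConnectionConfigSpec, WorkspaceConnectionSpec)
```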
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/swagger.json
{ "openapi": "3.0.1", "info": { "title": "Azure Machine Learning Designer Service Client", "version": "1.0.0" }, "paths": { "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/submit": { "post": { "tags": [ "BulkRuns" ], "operationId": "BulkRuns_SubmitBulkRun", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/SubmitBulkRunRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "string" } } } }, "202": { "description": "Accepted", "content": { "application/json": { "schema": { "type": "string" } } } }, "204": { "description": "No Content", "content": { "application/json": { "schema": { "type": "string" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/cancel": { "post": { "tags": [ "BulkRuns" ], "operationId": "BulkRuns_CancelFlowRun", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "text/plain": { "schema": { "type": "string" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/clone": { "post": { "tags": [ "BulkRuns" ], "operationId": "BulkRuns_CloneFlowFromFlowRun", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/CreateFlowRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}": { "get": { "tags": [ "BulkRuns" ], "operationId": "BulkRuns_GetFlowRunInfo", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": 
"#/components/parameters/workspaceNameParameter" }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowRunInfo" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/childRuns": { "get": { "tags": [ "BulkRuns" ], "operationId": "BulkRuns_GetFlowChildRuns", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "index", "in": "query", "schema": { "type": "integer", "format": "int32" } }, { "name": "startIndex", "in": "query", "schema": { "type": "integer", "format": "int32" } }, { "name": "endIndex", "in": "query", "schema": { "type": "integer", "format": "int32" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/nodeRuns/{nodeName}": { "get": { "tags": [ "BulkRuns" ], "operationId": "BulkRuns_GetFlowNodeRuns", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "nodeName", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "index", "in": "query", "schema": { "type": "integer", "format": "int32" } }, { "name": "startIndex", "in": "query", "schema": { "type": "integer", "format": "int32" } }, { "name": "endIndex", "in": "query", "schema": { "type": "integer", "format": "int32" } }, { "name": "aggregation", "in": "query", "schema": { "type": "boolean", "default": false } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/nodeRuns/{nodeName}/basePath": { "get": { "tags": [ "BulkRuns" ], "operationId": "BulkRuns_GetFlowNodeRunBasePath", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": 
"string" } }, { "name": "nodeName", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowRunBasePath" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/logContent": { "get": { "tags": [ "BulkRuns" ], "operationId": "BulkRuns_GetFlowRunLogContent", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "string" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/{connectionName}": { "post": { "tags": [ "Connection" ], "operationId": "Connection_CreateConnection", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "connectionName", "in": "path", "required": true, "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/CreateOrUpdateConnectionRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ConnectionEntity" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "put": { "tags": [ "Connection" ], "operationId": "Connection_UpdateConnection", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "connectionName", "in": "path", "required": true, "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/CreateOrUpdateConnectionRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ConnectionEntity" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "get": { "tags": [ "Connection" ], "operationId": "Connection_GetConnection", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "connectionName", "in": "path", 
"required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ConnectionEntity" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "delete": { "tags": [ "Connection" ], "operationId": "Connection_DeleteConnection", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "connectionName", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "connectionScope", "in": "query", "schema": { "$ref": "#/components/schemas/ConnectionScope" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ConnectionEntity" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection": { "get": { "tags": [ "Connection" ], "operationId": "Connection_ListConnections", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { "$ref": "#/components/schemas/ConnectionEntity" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/specs": { "get": { "tags": [ "Connection" ], "operationId": "Connection_ListConnectionSpecs", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { "$ref": "#/components/schemas/ConnectionSpec" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}": { "post": { "tags": [ "Connections" ], "operationId": "Connections_CreateConnection", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "connectionName", "in": "path", "required": true, "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": 
"#/components/schemas/CreateOrUpdateConnectionRequestDto" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ConnectionDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "put": { "tags": [ "Connections" ], "operationId": "Connections_UpdateConnection", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "connectionName", "in": "path", "required": true, "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/CreateOrUpdateConnectionRequestDto" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ConnectionDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "get": { "tags": [ "Connections" ], "operationId": "Connections_GetConnection", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "connectionName", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ConnectionDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "delete": { "tags": [ "Connections" ], "operationId": "Connections_DeleteConnection", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "connectionName", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ConnectionDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}/listsecrets": { "get": { "tags": [ "Connections" ], "operationId": "Connections_GetConnectionWithSecrets", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "connectionName", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ConnectionDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": 
{ "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections": { "get": { "tags": [ "Connections" ], "operationId": "Connections_ListConnections", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { "$ref": "#/components/schemas/ConnectionDto" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/specs": { "get": { "tags": [ "Connections" ], "operationId": "Connections_ListConnectionSpecs", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { "$ref": "#/components/schemas/WorkspaceConnectionSpec" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}/AzureOpenAIDeployments": { "get": { "tags": [ "Connections" ], "operationId": "Connections_ListAzureOpenAIDeployments", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "connectionName", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { "$ref": "#/components/schemas/AzureOpenAIDeploymentDto" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/submit": { "post": { "tags": [ "FlowRunsAdmin" ], "operationId": "FlowRunsAdmin_SubmitBulkRunAsync", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "bulkRunId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "errorHandlingMode", "in": "query", "schema": { "$ref": "#/components/schemas/ErrorHandlingMode" } } ], "responses": { 
"200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/SubmitBulkRunResponse" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/policy": { "post": { "tags": [ "FlowRunsAdmin" ], "operationId": "FlowRunsAdmin_SendPolicyValidationAsync", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "bulkRunId", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PolicyValidationResponse" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "get": { "tags": [ "FlowRunsAdmin" ], "operationId": "FlowRunsAdmin_CheckPolicyValidationAsync", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "bulkRunId", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PolicyValidationResponse" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/LogResult": { "post": { "tags": [ "FlowRunsAdmin" ], "operationId": "FlowRunsAdmin_LogResultForBulkRun", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "bulkRunId", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { "$ref": "#/components/schemas/KeyValuePairStringObject" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/storageInfo": { "get": { "tags": [ "FlowRunsAdmin" ], "operationId": "FlowRunsAdmin_GetStorageInfo", "parameters": [ { "$ref": 
"#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/StorageInfo" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/flowRuns/{flowRunId}/runtime/{runtimeVersion}/logEvent": { "post": { "tags": [ "FlowRunsAdmin" ], "operationId": "FlowRunsAdmin_LogFlowRunEvent", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "runtimeVersion", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "string" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/flowRuns/{flowRunId}/logEvent": { "post": { "tags": [ "FlowRunsAdmin" ], "operationId": "FlowRunsAdmin_LogFlowRunEventV2", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "runtimeVersion", "in": "query", "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "string" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/flowRuns/{flowRunId}/logTerminatedEvent": { "post": { "tags": [ "FlowRunsAdmin" ], "operationId": "FlowRunsAdmin_LogFlowRunTerminatedEvent", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "lastCheckedTime", "in": "query", "schema": { "type": "string", "format": "date-time" } } ], "responses": { "200": { "description": "Success", 
"content": { "application/json": { "schema": { "$ref": "#/components/schemas/LogRunTerminatedEventDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/serviceLogs": { "post": { "tags": [ "FlowRunsAdmin" ], "operationId": "FlowRunsAdmin_UpdateServiceLogs", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "bulkRunId", "in": "path", "required": true, "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ServiceLogRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/Task" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/serviceLogs/batch": { "post": { "tags": [ "FlowRunsAdmin" ], "operationId": "FlowRunsAdmin_BatchUpdateServiceLogs", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "bulkRunId", "in": "path", "required": true, "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "type": "array", "items": { "$ref": "#/components/schemas/ServiceLogRequest" } } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/Task" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}": { "post": { "tags": [ "FlowRuntimes" ], "operationId": "FlowRuntimes_CreateRuntime", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "runtimeName", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "asyncCall", "in": "query", "schema": { "type": "boolean", "default": false } }, { "name": "msiToken", "in": "query", "schema": { "type": "boolean", "default": false } }, { "name": "skipPortCheck", "in": "query", "schema": { "type": "boolean", "default": false } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": 
"#/components/schemas/CreateFlowRuntimeRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowRuntimeDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "put": { "tags": [ "FlowRuntimes" ], "operationId": "FlowRuntimes_UpdateRuntime", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "runtimeName", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "asyncCall", "in": "query", "schema": { "type": "boolean", "default": false } }, { "name": "msiToken", "in": "query", "schema": { "type": "boolean", "default": false } }, { "name": "skipPortCheck", "in": "query", "schema": { "type": "boolean", "default": false } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/UpdateFlowRuntimeRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowRuntimeDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "get": { "tags": [ "FlowRuntimes" ], "operationId": "FlowRuntimes_GetRuntime", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "runtimeName", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowRuntimeDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "delete": { "tags": [ "FlowRuntimes" ], "operationId": "FlowRuntimes_DeleteRuntime", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "runtimeName", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "asyncCall", "in": "query", "schema": { "type": "boolean", "default": false } }, { "name": "msiToken", "in": "query", "schema": { "type": "boolean", "default": false } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowRuntimeDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/checkCiAvailability": { "get": { "tags": [ "FlowRuntimes" ], "operationId": "FlowRuntimes_CheckCiAvailability", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": 
"#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "computeInstanceName", "in": "query", "required": true, "schema": { "type": "string" } }, { "name": "customAppName", "in": "query", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/AvailabilityResponse" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/checkMirAvailability": { "get": { "tags": [ "FlowRuntimes" ], "operationId": "FlowRuntimes_CheckMirAvailability", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "endpointName", "in": "query", "required": true, "schema": { "type": "string" } }, { "name": "deploymentName", "in": "query", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/AvailabilityResponse" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}/needUpgrade": { "get": { "tags": [ "FlowRuntimes" ], "operationId": "FlowRuntimes_CheckRuntimeUpgrade", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "runtimeName", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "boolean" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}/capability": { "get": { "tags": [ "FlowRuntimes" ], "operationId": "FlowRuntimes_GetRuntimeCapability", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "runtimeName", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowRuntimeCapability" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, 
"/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/latestConfig": { "get": { "tags": [ "FlowRuntimes" ], "operationId": "FlowRuntimes_GetRuntimeLatestConfig", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/RuntimeConfiguration" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes": { "get": { "tags": [ "FlowRuntimes" ], "operationId": "FlowRuntimes_ListRuntimes", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { "$ref": "#/components/schemas/FlowRuntimeDto" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/runtimes/latestConfig": { "get": { "tags": [ "FlowRuntimesWorkspaceIndependent" ], "operationId": "FlowRuntimesWorkspaceIndependent_GetRuntimeLatestConfig", "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/RuntimeConfiguration" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows": { "post": { "tags": [ "Flows" ], "operationId": "Flows_CreateFlow", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "experimentId", "in": "query", "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/CreateFlowRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "get": { "tags": [ "Flows" ], "operationId": "Flows_ListFlows", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "experimentId", "in": "query", "schema": { "type": "string" } }, { "name": "ownedOnly", "in": "query", "schema": { "type": "boolean" } }, { 
"name": "flowType", "in": "query", "schema": { "$ref": "#/components/schemas/FlowType" } }, { "name": "listViewType", "in": "query", "schema": { "$ref": "#/components/schemas/ListViewType" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { "$ref": "#/components/schemas/FlowBaseDto" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/clone": { "post": { "tags": [ "Flows" ], "operationId": "Flows_CloneFlow", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "experimentId", "in": "query", "required": true, "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/CreateFlowRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/fromsample": { "post": { "tags": [ "Flows" ], "operationId": "Flows_CreateFlowFromSample", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "experimentId", "in": "query", "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/CreateFlowFromSampleRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}": { "put": { "tags": [ "Flows" ], "operationId": "Flows_UpdateFlow", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "experimentId", "in": "query", "required": true, "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/UpdateFlowRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "string" } } } }, "default": { "description": "Error response 
describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "patch": { "tags": [ "Flows" ], "operationId": "Flows_PatchFlow", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "experimentId", "in": "query", "required": true, "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json-patch+json": { "schema": { "$ref": "#/components/schemas/PatchFlowRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "string" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "get": { "tags": [ "Flows" ], "operationId": "Flows_GetFlow", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "experimentId", "in": "query", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/submit": { "post": { "tags": [ "Flows" ], "operationId": "Flows_SubmitFlow", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "experimentId", "in": "query", "required": true, "schema": { "type": "string" } }, { "name": "endpointName", "in": "query", "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/SubmitFlowRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowRunResult" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/{flowRunId}/status": { "get": { "tags": [ "Flows" ], "operationId": "Flows_GetFlowRunStatus", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": 
"experimentId", "in": "query", "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowRunResult" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}": { "get": { "tags": [ "Flows" ], "operationId": "Flows_GetFlowRunInfo", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "experimentId", "in": "query", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowRunInfo" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/childRuns": { "get": { "tags": [ "Flows" ], "operationId": "Flows_GetFlowChildRuns", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "index", "in": "query", "schema": { "type": "integer", "format": "int32" } }, { "name": "startIndex", "in": "query", "schema": { "type": "integer", "format": "int32" } }, { "name": "endIndex", "in": "query", "schema": { "type": "integer", "format": "int32" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/nodeRuns/{nodeName}": { "get": { "tags": [ "Flows" ], "operationId": "Flows_GetFlowNodeRuns", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "nodeName", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "index", "in": "query", "schema": { "type": "integer", "format": "int32" } 
}, { "name": "startIndex", "in": "query", "schema": { "type": "integer", "format": "int32" } }, { "name": "endIndex", "in": "query", "schema": { "type": "integer", "format": "int32" } }, { "name": "aggregation", "in": "query", "schema": { "type": "boolean", "default": false } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/nodeRuns/{nodeName}/basePath": { "get": { "tags": [ "Flows" ], "operationId": "Flows_GetFlowNodeRunBasePath", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "nodeName", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowRunBasePath" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/clone": { "post": { "tags": [ "Flows" ], "operationId": "Flows_CloneFlowFromFlowRun", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "experimentId", "in": "query", "required": true, "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/CreateFlowRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/bulkTests": { "get": { "tags": [ "Flows" ], "operationId": "Flows_ListBulkTests", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "experimentId", "in": "query", "schema": { "type": "string" } } ], "responses": { "200": { 
"description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { "$ref": "#/components/schemas/BulkTestDto" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/bulkTests/{bulkTestId}": { "get": { "tags": [ "Flows" ], "operationId": "Flows_GetBulkTest", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "bulkTestId", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/BulkTestDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/samples": { "get": { "tags": [ "Flows" ], "operationId": "Flows_GetSamples", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "useSnapshot", "in": "query", "schema": { "type": "boolean", "default": false } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowSampleDto" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/evaluateSamples": { "get": { "tags": [ "Flows" ], "operationId": "Flows_GetEvaluateFlowSamples", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "useSnapshot", "in": "query", "schema": { "type": "boolean", "default": false } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowSampleDto" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/DeployReservedEnvironmentVariableNames": { "get": { "tags": [ "Flows" ], "operationId": "Flows_GetFlowDeployReservedEnvironmentVariableNames", "parameters": [ { "$ref": 
"#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { "type": "string" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/deploy": { "post": { "tags": [ "Flows" ], "operationId": "Flows_DeployFlow", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "asyncCall", "in": "query", "schema": { "type": "boolean", "default": false } }, { "name": "msiToken", "in": "query", "schema": { "type": "boolean", "default": false } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/DeployFlowRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "string" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/logContent": { "get": { "tags": [ "Flows" ], "operationId": "Flows_GetFlowRunLogContent", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "string" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/runs/{flowRunId}/cancel": { "post": { "tags": [ "Flows" ], "operationId": "Flows_CancelFlowRun", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "text/plain": { "schema": { "type": "string" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, 
"/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/flowTests/{flowRunId}/cancel": { "post": { "tags": [ "Flows" ], "operationId": "Flows_CancelFlowTest", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "flowRunId", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "text/plain": { "schema": { "type": "string" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/bulkTests/{bulkTestRunId}/cancel": { "post": { "tags": [ "Flows" ], "operationId": "Flows_CancelBulkTestRun", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "bulkTestRunId", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "text/plain": { "schema": { "type": "string" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/FlowSnapshot": { "post": { "tags": [ "Flows" ], "operationId": "Flows_GetFlowSnapshot", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/CreateFlowRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowSnapshot" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/connectionOverride": { "post": { "tags": [ "Flows" ], "operationId": "Flows_GetConnectionOverrideSettings", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "runtimeName", "in": "query", "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowGraphReference" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { "$ref": 
"#/components/schemas/ConnectionOverrideSetting" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/flowInputs": { "post": { "tags": [ "Flows" ], "operationId": "Flows_GetFlowInputs", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowGraphReference" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowInputDefinition" }, "description": "This is a dictionary" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/LoadAsComponent": { "post": { "tags": [ "Flows" ], "operationId": "Flows_LoadAsComponent", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/LoadFlowAsComponentRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "string" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/flowTools": { "get": { "tags": [ "Flows" ], "operationId": "Flows_GetFlowTools", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "flowRuntimeName", "in": "query", "schema": { "type": "string" } }, { "name": "experimentId", "in": "query", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowToolsDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/sessions": { "post": { "tags": [ "Flows" ], "operationId": "Flows_SetupFlowSession", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": 
"#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "experimentId", "in": "query", "required": true, "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/SetupFlowSessionRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/IActionResult" } } } }, "202": { "description": "Accepted", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/IActionResult" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "delete": { "tags": [ "Flows" ], "operationId": "Flows_DeleteFlowSession", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "experimentId", "in": "query", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/IActionResult" } } } }, "202": { "description": "Accepted", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/IActionResult" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/sessions/status": { "get": { "tags": [ "Flows" ], "operationId": "Flows_GetFlowSessionStatus", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "experimentId", "in": "query", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/FlowSessionDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/{sessionId}": { "post": { "tags": [ "FlowSessions" ], "operationId": "FlowSessions_CreateFlowSession", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "sessionId", "in": "path", "required": true, "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/CreateFlowSessionRequest" } } } }, "responses": { "200": { 
"description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/IActionResult" } } } }, "202": { "description": "Accepted", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/IActionResult" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "get": { "tags": [ "FlowSessions" ], "operationId": "FlowSessions_GetFlowSession", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "sessionId", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/GetTrainingSessionDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } }, "delete": { "tags": [ "FlowSessions" ], "operationId": "FlowSessions_DeleteFlowSession", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "sessionId", "in": "path", "required": true, "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/IActionResult" } } } }, "202": { "description": "Accepted", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/IActionResult" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/{sessionId}/{actionType}/locations/{location}/operations/{operationId}": { "get": { "tags": [ "FlowSessions" ], "operationId": "FlowSessions_PollOperationStatus", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "sessionId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "actionType", "in": "path", "required": true, "schema": { "$ref": "#/components/schemas/SetupFlowSessionAction" } }, { "name": "location", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "operationId", "in": "path", "required": true, "schema": { "type": "string" } }, { "name": "api-version", "in": "query", "schema": { "type": "string" } }, { "name": "type", "in": "query", "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/IActionResult" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, 
"/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/standbypools": { "get": { "tags": [ "FlowSessions" ], "operationId": "FlowSessions_GetStandbyPools", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { "$ref": "#/components/schemas/StandbyPoolProperties" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/v1.0/flows/getIndexEntities": { "post": { "tags": [ "FlowsProvider" ], "operationId": "FlowsProvider_GetIndexEntityById", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/UnversionedEntityRequestDto" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/UnversionedEntityResponseDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/v1.0/flows/rebuildIndex": { "post": { "tags": [ "FlowsProvider" ], "operationId": "FlowsProvider_GetUpdatedEntityIdsForWorkspace", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/UnversionedRebuildIndexDto" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/UnversionedRebuildResponseDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/setting": { "get": { "tags": [ "Tools" ], "operationId": "Tools_GetToolSetting", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ToolSetting" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/samples": { "get": { "tags": [ "Tools" ], "operationId": "Tools_GetSamples", "parameters": [ { "$ref": 
"#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/Tool" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/meta": { "post": { "tags": [ "Tools" ], "operationId": "Tools_GetToolMeta", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "toolName", "in": "query", "required": true, "schema": { "type": "string" } }, { "name": "toolType", "in": "query", "required": true, "schema": { "type": "string" } }, { "name": "endpointName", "in": "query", "schema": { "type": "string" } }, { "name": "flowRuntimeName", "in": "query", "schema": { "type": "string" } }, { "name": "flowId", "in": "query", "schema": { "type": "string" } } ], "requestBody": { "content": { "text/plain": { "schema": { "type": "string" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "string" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/meta-v2": { "post": { "tags": [ "Tools" ], "operationId": "Tools_GetToolMetaV2", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowRuntimeName", "in": "query", "schema": { "type": "string" } }, { "name": "flowId", "in": "query", "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/GenerateToolMetaRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ToolMetaDto" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/packageTools": { "get": { "tags": [ "Tools" ], "operationId": "Tools_GetPackageTools", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowRuntimeName", "in": "query", "schema": { "type": "string" } }, { "name": "flowId", "in": "query", "schema": { "type": "string" } } ], "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { 
"type": "object", "additionalProperties": { "$ref": "#/components/schemas/Tool" } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/dynamicList": { "post": { "tags": [ "Tools" ], "operationId": "Tools_GetDynamicList", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowRuntimeName", "in": "query", "schema": { "type": "string" } }, { "name": "flowId", "in": "query", "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/GetDynamicListRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "type": "array", "items": { } } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } }, "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/RetrieveToolFuncResult": { "post": { "tags": [ "Tools" ], "operationId": "Tools_RetrieveToolFuncResult", "parameters": [ { "$ref": "#/components/parameters/subscriptionIdParameter" }, { "$ref": "#/components/parameters/resourceGroupNameParameter" }, { "$ref": "#/components/parameters/workspaceNameParameter" }, { "name": "flowRuntimeName", "in": "query", "schema": { "type": "string" } }, { "name": "flowId", "in": "query", "schema": { "type": "string" } } ], "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/RetrieveToolFuncResultRequest" } } } }, "responses": { "200": { "description": "Success", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ToolFuncResponse" } } } }, "default": { "description": "Error response describing why the operation failed.", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } } } }, "components": { "schemas": { "ACIAdvanceSettings": { "type": "object", "properties": { "containerResourceRequirements": { "$ref": "#/components/schemas/ContainerResourceRequirements" }, "appInsightsEnabled": { "type": "boolean", "nullable": true }, "sslEnabled": { "type": "boolean", "nullable": true }, "sslCertificate": { "type": "string", "nullable": true }, "sslKey": { "type": "string", "nullable": true }, "cName": { "type": "string", "nullable": true }, "dnsNameLabel": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AEVAAssetType": { "enum": [ "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel", "OpenAIModel" ], "type": "string" }, "AEVAComputeConfiguration": { "type": "object", "properties": { "target": { "type": "string", "nullable": true }, "instanceCount": { "type": "integer", "format": "int32", "nullable": true }, "isLocal": { "type": "boolean" }, "location": { "type": "string", "nullable": true }, "isClusterless": { "type": "boolean" }, "instanceType": { "type": "string", "nullable": true }, "properties": { "type": "object", "additionalProperties": { 
"nullable": true }, "nullable": true }, "isPreemptable": { "type": "boolean" } }, "additionalProperties": false }, "AEVADataStoreMode": { "enum": [ "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link" ], "type": "string" }, "AEVAIdentityType": { "enum": [ "UserIdentity", "Managed", "AMLToken" ], "type": "string" }, "AEVAResourceConfiguration": { "type": "object", "properties": { "instanceCount": { "type": "integer", "format": "int32", "nullable": true }, "instanceType": { "type": "string", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "nullable": true }, "nullable": true }, "locations": { "type": "array", "items": { "type": "string" }, "nullable": true }, "instancePriority": { "type": "string", "nullable": true }, "quotaEnforcementResourceId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AISuperComputerConfiguration": { "type": "object", "properties": { "instanceType": { "type": "string", "nullable": true }, "instanceTypes": { "type": "array", "items": { "type": "string" }, "nullable": true }, "imageVersion": { "type": "string", "nullable": true }, "location": { "type": "string", "nullable": true }, "locations": { "type": "array", "items": { "type": "string" }, "nullable": true }, "aiSuperComputerStorageData": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AISuperComputerStorageReferenceConfiguration" }, "nullable": true }, "interactive": { "type": "boolean" }, "scalePolicy": { "$ref": "#/components/schemas/AISuperComputerScalePolicy" }, "virtualClusterArmId": { "type": "string", "nullable": true }, "tensorboardLogDirectory": { "type": "string", "nullable": true }, "sshPublicKey": { "type": "string", "nullable": true }, "sshPublicKeys": { "type": "array", "items": { "type": "string" }, "nullable": true }, "enableAzmlInt": { "type": "boolean" }, "priority": { "type": "string", "nullable": true }, "slaTier": { "type": "string", "nullable": true }, "suspendOnIdleTimeHours": { "type": "integer", "format": "int64", "nullable": true }, "userAlias": { "type": "string", "nullable": true }, "quotaEnforcementResourceId": { "type": "string", "nullable": true }, "modelComputeSpecificationId": { "type": "string", "nullable": true }, "groupPolicyName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AISuperComputerScalePolicy": { "type": "object", "properties": { "autoScaleInstanceTypeCountSet": { "type": "array", "items": { "type": "integer", "format": "int32" }, "nullable": true }, "autoScaleIntervalInSec": { "type": "integer", "format": "int32", "nullable": true }, "maxInstanceTypeCount": { "type": "integer", "format": "int32", "nullable": true }, "minInstanceTypeCount": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "AISuperComputerStorageReferenceConfiguration": { "type": "object", "properties": { "containerName": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AKSAdvanceSettings": { "type": "object", "properties": { "autoScaler": { "$ref": "#/components/schemas/AutoScaler" }, "containerResourceRequirements": { "$ref": "#/components/schemas/ContainerResourceRequirements" }, "appInsightsEnabled": { "type": "boolean", "nullable": true }, "scoringTimeoutMs": { "type": "integer", "format": "int32", "nullable": true }, "numReplicas": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, 
"AKSReplicaStatus": { "type": "object", "properties": { "desiredReplicas": { "type": "integer", "format": "int32" }, "updatedReplicas": { "type": "integer", "format": "int32" }, "availableReplicas": { "type": "integer", "format": "int32" }, "error": { "$ref": "#/components/schemas/ModelManagementErrorResponse" } }, "additionalProperties": false }, "AMLComputeConfiguration": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "vmSize": { "type": "string", "nullable": true }, "vmPriority": { "$ref": "#/components/schemas/VmPriority" }, "retainCluster": { "type": "boolean" }, "clusterMaxNodeCount": { "type": "integer", "format": "int32", "nullable": true }, "osType": { "type": "string", "nullable": true }, "virtualMachineImage": { "type": "string", "nullable": true } }, "additionalProperties": false }, "APCloudConfiguration": { "type": "object", "properties": { "referencedAPModuleGuid": { "type": "string", "nullable": true }, "userAlias": { "type": "string", "nullable": true }, "aetherModuleType": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ActionType": { "enum": [ "SendValidationRequest", "GetValidationStatus", "SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent" ], "type": "string" }, "Activate": { "type": "object", "properties": { "when": { "type": "string", "nullable": true }, "is": { "nullable": true } }, "additionalProperties": false }, "AdditionalErrorInfo": { "type": "object", "properties": { "type": { "type": "string", "nullable": true }, "info": { "nullable": true } }, "additionalProperties": false }, "AdhocTriggerScheduledCommandJobRequest": { "type": "object", "properties": { "jobName": { "type": "string", "nullable": true }, "jobDisplayName": { "type": "string", "nullable": true }, "triggerTimeString": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AdhocTriggerScheduledSparkJobRequest": { "type": "object", "properties": { "jobName": { "type": "string", "nullable": true }, "jobDisplayName": { "type": "string", "nullable": true }, "triggerTimeString": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherAPCloudConfiguration": { "type": "object", "properties": { "referencedAPModuleGuid": { "type": "string", "nullable": true }, "userAlias": { "type": "string", "nullable": true }, "aetherModuleType": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherAmlDataset": { "type": "object", "properties": { "registeredDataSetReference": { "$ref": "#/components/schemas/AetherRegisteredDataSetReference" }, "savedDataSetReference": { "$ref": "#/components/schemas/AetherSavedDataSetReference" }, "additionalTransformations": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherAmlSparkCloudSetting": { "type": "object", "properties": { "entry": { "$ref": "#/components/schemas/AetherEntrySetting" }, "files": { "type": "array", "items": { "type": "string" }, "nullable": true }, "archives": { "type": "array", "items": { "type": "string" }, "nullable": true }, "jars": { "type": "array", "items": { "type": "string" }, "nullable": true }, "pyFiles": { "type": "array", "items": { "type": "string" }, "nullable": true }, "driverMemory": { "type": "string", "nullable": true }, "driverCores": { "type": "integer", "format": "int32", "nullable": true }, "executorMemory": { "type": "string", "nullable": true }, "executorCores": { "type": "integer", "format": "int32", "nullable": true }, "numberExecutors": { "type": "integer", 
"format": "int32", "nullable": true }, "environmentAssetId": { "type": "string", "nullable": true }, "environmentVariables": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "inlineEnvironmentDefinitionString": { "type": "string", "nullable": true }, "conf": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "compute": { "type": "string", "nullable": true }, "resources": { "$ref": "#/components/schemas/AetherResourcesSetting" }, "identity": { "$ref": "#/components/schemas/AetherIdentitySetting" } }, "additionalProperties": false }, "AetherArgumentAssignment": { "type": "object", "properties": { "valueType": { "$ref": "#/components/schemas/AetherArgumentValueType" }, "value": { "type": "string", "nullable": true }, "nestedArgumentList": { "type": "array", "items": { "$ref": "#/components/schemas/AetherArgumentAssignment" }, "nullable": true }, "stringInterpolationArgumentList": { "type": "array", "items": { "$ref": "#/components/schemas/AetherArgumentAssignment" }, "nullable": true } }, "additionalProperties": false }, "AetherArgumentValueType": { "enum": [ "Literal", "Parameter", "Input", "Output", "NestedList", "StringInterpolationList" ], "type": "string" }, "AetherAssetDefinition": { "type": "object", "properties": { "path": { "type": "string", "nullable": true }, "type": { "$ref": "#/components/schemas/AetherAssetType" }, "assetId": { "type": "string", "nullable": true }, "initialAssetId": { "type": "string", "nullable": true }, "serializedAssetId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherAssetOutputSettings": { "type": "object", "properties": { "path": { "type": "string", "nullable": true }, "PathParameterAssignment": { "$ref": "#/components/schemas/AetherParameterAssignment" }, "type": { "$ref": "#/components/schemas/AetherAssetType" }, "options": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "dataStoreMode": { "$ref": "#/components/schemas/AetherDataStoreMode" }, "name": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherAssetType": { "enum": [ "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel", "OpenAIModel" ], "type": "string" }, "AetherAutoFeaturizeConfiguration": { "type": "object", "properties": { "featurizationConfig": { "$ref": "#/components/schemas/AetherFeaturizationSettings" } }, "additionalProperties": false }, "AetherAutoMLComponentConfiguration": { "type": "object", "properties": { "autoTrainConfig": { "$ref": "#/components/schemas/AetherAutoTrainConfiguration" }, "autoFeaturizeConfig": { "$ref": "#/components/schemas/AetherAutoFeaturizeConfiguration" } }, "additionalProperties": false }, "AetherAutoTrainConfiguration": { "type": "object", "properties": { "generalSettings": { "$ref": "#/components/schemas/AetherGeneralSettings" }, "limitSettings": { "$ref": "#/components/schemas/AetherLimitSettings" }, "dataSettings": { "$ref": "#/components/schemas/AetherDataSettings" }, "forecastingSettings": { "$ref": "#/components/schemas/AetherForecastingSettings" }, "trainingSettings": { "$ref": "#/components/schemas/AetherTrainingSettings" }, "sweepSettings": { "$ref": "#/components/schemas/AetherSweepSettings" }, "imageModelSettings": { "type": "object", "additionalProperties": { "nullable": true }, "nullable": true }, "properties": { "type": 
"object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "computeConfiguration": { "$ref": "#/components/schemas/AetherComputeConfiguration" }, "resourceConfigurtion": { "$ref": "#/components/schemas/AetherResourceConfiguration" }, "environmentId": { "type": "string", "nullable": true }, "environmentVariables": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true } }, "additionalProperties": false }, "AetherAzureBlobReference": { "type": "object", "properties": { "container": { "type": "string", "nullable": true }, "sasToken": { "type": "string", "nullable": true }, "uri": { "type": "string", "nullable": true }, "account": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true }, "pathType": { "$ref": "#/components/schemas/AetherFileBasedPathType" }, "amlDataStoreName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherAzureDataLakeGen2Reference": { "type": "object", "properties": { "fileSystemName": { "type": "string", "nullable": true }, "uri": { "type": "string", "nullable": true }, "account": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true }, "pathType": { "$ref": "#/components/schemas/AetherFileBasedPathType" }, "amlDataStoreName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherAzureDataLakeReference": { "type": "object", "properties": { "tenant": { "type": "string", "nullable": true }, "subscription": { "type": "string", "nullable": true }, "resourceGroup": { "type": "string", "nullable": true }, "dataLakeUri": { "type": "string", "nullable": true }, "uri": { "type": "string", "nullable": true }, "account": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true }, "pathType": { "$ref": "#/components/schemas/AetherFileBasedPathType" }, "amlDataStoreName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherAzureDatabaseReference": { "type": "object", "properties": { "serverUri": { "type": "string", "nullable": true }, "databaseName": { "type": "string", "nullable": true }, "tableName": { "type": "string", "nullable": true }, "sqlQuery": { "type": "string", "nullable": true }, "storedProcedureName": { "type": "string", "nullable": true }, "storedProcedureParameters": { "type": "array", "items": { "$ref": "#/components/schemas/AetherStoredProcedureParameter" }, "nullable": true }, "amlDataStoreName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherAzureFilesReference": { "type": "object", "properties": { "share": { "type": "string", "nullable": true }, "uri": { "type": "string", "nullable": true }, "account": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true }, "pathType": { "$ref": "#/components/schemas/AetherFileBasedPathType" }, "amlDataStoreName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherBatchAiComputeInfo": { "type": "object", "properties": { "batchAiSubscriptionId": { "type": "string", "nullable": true }, "batchAiResourceGroup": { "type": "string", "nullable": true }, "batchAiWorkspaceName": { "type": "string", "nullable": true }, "clusterName": { "type": "string", "nullable": true }, "nativeSharedDirectory": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherBuildArtifactInfo": { "type": "object", "properties": { "type": { "$ref": 
"#/components/schemas/AetherBuildSourceType" }, "cloudBuildDropPathInfo": { "$ref": "#/components/schemas/AetherCloudBuildDropPathInfo" }, "vsoBuildArtifactInfo": { "$ref": "#/components/schemas/AetherVsoBuildArtifactInfo" } }, "additionalProperties": false }, "AetherBuildSourceType": { "enum": [ "CloudBuild", "Vso", "VsoGit" ], "type": "string" }, "AetherCloudBuildDropPathInfo": { "type": "object", "properties": { "buildInfo": { "$ref": "#/components/schemas/AetherCloudBuildInfo" }, "root": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherCloudBuildInfo": { "type": "object", "properties": { "queueInfo": { "$ref": "#/components/schemas/AetherCloudBuildQueueInfo" }, "buildId": { "type": "string", "nullable": true }, "dropUrl": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherCloudBuildQueueInfo": { "type": "object", "properties": { "buildQueue": { "type": "string", "nullable": true }, "buildRole": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherCloudPrioritySetting": { "type": "object", "properties": { "scopePriority": { "$ref": "#/components/schemas/AetherPriorityConfiguration" }, "AmlComputePriority": { "$ref": "#/components/schemas/AetherPriorityConfiguration" }, "ItpPriority": { "$ref": "#/components/schemas/AetherPriorityConfiguration" }, "SingularityPriority": { "$ref": "#/components/schemas/AetherPriorityConfiguration" } }, "additionalProperties": false }, "AetherCloudSettings": { "type": "object", "properties": { "linkedSettings": { "type": "array", "items": { "$ref": "#/components/schemas/AetherParameterAssignment" }, "nullable": true }, "priorityConfig": { "$ref": "#/components/schemas/AetherPriorityConfiguration" }, "hdiRunConfig": { "$ref": "#/components/schemas/AetherHdiRunConfiguration" }, "subGraphConfig": { "$ref": "#/components/schemas/AetherSubGraphConfiguration" }, "autoMLComponentConfig": { "$ref": "#/components/schemas/AetherAutoMLComponentConfiguration" }, "apCloudConfig": { "$ref": "#/components/schemas/AetherAPCloudConfiguration" }, "scopeCloudConfig": { "$ref": "#/components/schemas/AetherScopeCloudConfiguration" }, "esCloudConfig": { "$ref": "#/components/schemas/AetherEsCloudConfiguration" }, "dataTransferCloudConfig": { "$ref": "#/components/schemas/AetherDataTransferCloudConfiguration" }, "amlSparkCloudSetting": { "$ref": "#/components/schemas/AetherAmlSparkCloudSetting" }, "dataTransferV2CloudSetting": { "$ref": "#/components/schemas/AetherDataTransferV2CloudSetting" } }, "additionalProperties": false }, "AetherColumnTransformer": { "type": "object", "properties": { "fields": { "type": "array", "items": { "type": "string" }, "nullable": true }, "parameters": { "nullable": true } }, "additionalProperties": false }, "AetherComputeConfiguration": { "type": "object", "properties": { "target": { "type": "string", "nullable": true }, "instanceCount": { "type": "integer", "format": "int32", "nullable": true }, "isLocal": { "type": "boolean" }, "location": { "type": "string", "nullable": true }, "isClusterless": { "type": "boolean" }, "instanceType": { "type": "string", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "nullable": true }, "nullable": true }, "isPreemptable": { "type": "boolean" } }, "additionalProperties": false }, "AetherComputeSetting": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "computeType": { "$ref": "#/components/schemas/AetherComputeType" }, "batchAiComputeInfo": { "$ref": 
"#/components/schemas/AetherBatchAiComputeInfo" }, "remoteDockerComputeInfo": { "$ref": "#/components/schemas/AetherRemoteDockerComputeInfo" }, "hdiClusterComputeInfo": { "$ref": "#/components/schemas/AetherHdiClusterComputeInfo" }, "mlcComputeInfo": { "$ref": "#/components/schemas/AetherMlcComputeInfo" }, "databricksComputeInfo": { "$ref": "#/components/schemas/AetherDatabricksComputeInfo" } }, "additionalProperties": false }, "AetherComputeType": { "enum": [ "BatchAi", "MLC", "HdiCluster", "RemoteDocker", "Databricks", "Aisc" ], "type": "string" }, "AetherControlFlowType": { "enum": [ "None", "DoWhile", "ParallelFor" ], "type": "string" }, "AetherControlInput": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "defaultValue": { "$ref": "#/components/schemas/AetherControlInputValue" } }, "additionalProperties": false }, "AetherControlInputValue": { "enum": [ "None", "False", "True", "Skipped" ], "type": "string" }, "AetherControlOutput": { "type": "object", "properties": { "name": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherControlType": { "enum": [ "IfElse" ], "type": "string" }, "AetherCopyDataTask": { "type": "object", "properties": { "DataCopyMode": { "$ref": "#/components/schemas/AetherDataCopyMode" } }, "additionalProperties": false }, "AetherCosmosReference": { "type": "object", "properties": { "cluster": { "type": "string", "nullable": true }, "vc": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherCreatedBy": { "type": "object", "properties": { "userObjectId": { "type": "string", "nullable": true }, "userTenantId": { "type": "string", "nullable": true }, "userName": { "type": "string", "nullable": true }, "puid": { "type": "string", "nullable": true }, "iss": { "type": "string", "nullable": true }, "idp": { "type": "string", "nullable": true }, "altsecId": { "type": "string", "nullable": true }, "sourceIp": { "type": "string", "nullable": true }, "skipRegistryPrivateLinkCheck": { "type": "boolean" } }, "additionalProperties": false }, "AetherCustomReference": { "type": "object", "properties": { "amlDataStoreName": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherDBFSReference": { "type": "object", "properties": { "relativePath": { "type": "string", "nullable": true }, "amlDataStoreName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherDataCopyMode": { "enum": [ "MergeWithOverwrite", "FailIfConflict" ], "type": "string" }, "AetherDataLocation": { "type": "object", "properties": { "storageType": { "$ref": "#/components/schemas/AetherDataLocationStorageType" }, "storageId": { "type": "string", "nullable": true }, "uri": { "type": "string", "nullable": true }, "dataStoreName": { "type": "string", "nullable": true }, "dataReference": { "$ref": "#/components/schemas/AetherDataReference" }, "amlDataset": { "$ref": "#/components/schemas/AetherAmlDataset" }, "assetDefinition": { "$ref": "#/components/schemas/AetherAssetDefinition" }, "isCompliant": { "type": "boolean" }, "reuseCalculationFields": { "$ref": "#/components/schemas/AetherDataLocationReuseCalculationFields" } }, "additionalProperties": false }, "AetherDataLocationReuseCalculationFields": { "type": "object", "properties": { "dataStoreName": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true }, 
"dataExperimentId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherDataLocationStorageType": { "enum": [ "Cosmos", "AzureBlob", "Artifact", "Snapshot", "SavedAmlDataset", "Asset" ], "type": "string" }, "AetherDataPath": { "type": "object", "properties": { "dataStoreName": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true }, "sqlDataPath": { "$ref": "#/components/schemas/AetherSqlDataPath" } }, "additionalProperties": false }, "AetherDataReference": { "type": "object", "properties": { "type": { "$ref": "#/components/schemas/AetherDataReferenceType" }, "azureBlobReference": { "$ref": "#/components/schemas/AetherAzureBlobReference" }, "azureDataLakeReference": { "$ref": "#/components/schemas/AetherAzureDataLakeReference" }, "azureFilesReference": { "$ref": "#/components/schemas/AetherAzureFilesReference" }, "cosmosReference": { "$ref": "#/components/schemas/AetherCosmosReference" }, "phillyHdfsReference": { "$ref": "#/components/schemas/AetherPhillyHdfsReference" }, "azureSqlDatabaseReference": { "$ref": "#/components/schemas/AetherAzureDatabaseReference" }, "azurePostgresDatabaseReference": { "$ref": "#/components/schemas/AetherAzureDatabaseReference" }, "azureDataLakeGen2Reference": { "$ref": "#/components/schemas/AetherAzureDataLakeGen2Reference" }, "dbfsReference": { "$ref": "#/components/schemas/AetherDBFSReference" }, "azureMySqlDatabaseReference": { "$ref": "#/components/schemas/AetherAzureDatabaseReference" }, "customReference": { "$ref": "#/components/schemas/AetherCustomReference" }, "hdfsReference": { "$ref": "#/components/schemas/AetherHdfsReference" } }, "additionalProperties": false }, "AetherDataReferenceType": { "enum": [ "None", "AzureBlob", "AzureDataLake", "AzureFiles", "Cosmos", "PhillyHdfs", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase", "Custom", "Hdfs" ], "type": "string" }, "AetherDataSetDefinition": { "type": "object", "properties": { "dataTypeShortName": { "type": "string", "nullable": true }, "parameterName": { "type": "string", "nullable": true }, "value": { "$ref": "#/components/schemas/AetherDataSetDefinitionValue" } }, "additionalProperties": false }, "AetherDataSetDefinitionValue": { "type": "object", "properties": { "literalValue": { "$ref": "#/components/schemas/AetherDataPath" }, "dataSetReference": { "$ref": "#/components/schemas/AetherRegisteredDataSetReference" }, "savedDataSetReference": { "$ref": "#/components/schemas/AetherSavedDataSetReference" }, "assetDefinition": { "$ref": "#/components/schemas/AetherAssetDefinition" } }, "additionalProperties": false }, "AetherDataSettings": { "type": "object", "properties": { "targetColumnName": { "type": "string", "nullable": true }, "weightColumnName": { "type": "string", "nullable": true }, "positiveLabel": { "type": "string", "nullable": true }, "validationData": { "$ref": "#/components/schemas/AetherValidationDataSettings" }, "testData": { "$ref": "#/components/schemas/AetherTestDataSettings" } }, "additionalProperties": false }, "AetherDataStoreMode": { "enum": [ "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link" ], "type": "string" }, "AetherDataTransferCloudConfiguration": { "type": "object", "properties": { "AllowOverwrite": { "type": "boolean", "nullable": true } }, "additionalProperties": false }, "AetherDataTransferSink": { "type": "object", "properties": { "type": { "$ref": "#/components/schemas/AetherDataTransferStorageType" }, "fileSystem": { "$ref": 
"#/components/schemas/AetherFileSystem" }, "databaseSink": { "$ref": "#/components/schemas/AetherDatabaseSink" } }, "additionalProperties": false }, "AetherDataTransferSource": { "type": "object", "properties": { "type": { "$ref": "#/components/schemas/AetherDataTransferStorageType" }, "fileSystem": { "$ref": "#/components/schemas/AetherFileSystem" }, "databaseSource": { "$ref": "#/components/schemas/AetherDatabaseSource" } }, "additionalProperties": false }, "AetherDataTransferStorageType": { "enum": [ "DataBase", "FileSystem" ], "type": "string" }, "AetherDataTransferTaskType": { "enum": [ "ImportData", "ExportData", "CopyData" ], "type": "string" }, "AetherDataTransferV2CloudSetting": { "type": "object", "properties": { "taskType": { "$ref": "#/components/schemas/AetherDataTransferTaskType" }, "ComputeName": { "type": "string", "nullable": true }, "CopyDataTask": { "$ref": "#/components/schemas/AetherCopyDataTask" }, "ImportDataTask": { "$ref": "#/components/schemas/AetherImportDataTask" }, "ExportDataTask": { "$ref": "#/components/schemas/AetherExportDataTask" }, "DataTransferSources": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AetherDataTransferSource" }, "description": "This is a dictionary", "nullable": true }, "DataTransferSinks": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AetherDataTransferSink" }, "description": "This is a dictionary", "nullable": true }, "DataCopyMode": { "$ref": "#/components/schemas/AetherDataCopyMode" } }, "additionalProperties": false }, "AetherDatabaseSink": { "type": "object", "properties": { "connection": { "type": "string", "nullable": true }, "table": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherDatabaseSource": { "type": "object", "properties": { "connection": { "type": "string", "nullable": true }, "query": { "type": "string", "nullable": true }, "storedProcedureName": { "type": "string", "nullable": true }, "storedProcedureParameters": { "type": "array", "items": { "$ref": "#/components/schemas/AetherStoredProcedureParameter" }, "nullable": true } }, "additionalProperties": false }, "AetherDatabricksComputeInfo": { "type": "object", "properties": { "existingClusterId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherDatasetOutput": { "type": "object", "properties": { "datasetType": { "$ref": "#/components/schemas/AetherDatasetType" }, "datasetRegistration": { "$ref": "#/components/schemas/AetherDatasetRegistration" }, "datasetOutputOptions": { "$ref": "#/components/schemas/AetherDatasetOutputOptions" } }, "additionalProperties": false }, "AetherDatasetOutputOptions": { "type": "object", "properties": { "sourceGlobs": { "$ref": "#/components/schemas/AetherGlobsOptions" }, "pathOnDatastore": { "type": "string", "nullable": true }, "PathOnDatastoreParameterAssignment": { "$ref": "#/components/schemas/AetherParameterAssignment" } }, "additionalProperties": false }, "AetherDatasetRegistration": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "createNewVersion": { "type": "boolean" }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "additionalTransformations": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherDatasetType": { "enum": [ "File", "Tabular" ], "type": "string" }, "AetherDatastoreSetting": { "type": "object", "properties": { 
"dataStoreName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherDoWhileControlFlowInfo": { "type": "object", "properties": { "outputPortNameToInputPortNamesMapping": { "type": "object", "additionalProperties": { "type": "array", "items": { "type": "string" }, "nullable": true }, "nullable": true }, "conditionOutputPortName": { "type": "string", "nullable": true }, "runSettings": { "$ref": "#/components/schemas/AetherDoWhileControlFlowRunSettings" } }, "additionalProperties": false }, "AetherDoWhileControlFlowRunSettings": { "type": "object", "properties": { "maxLoopIterationCount": { "$ref": "#/components/schemas/AetherParameterAssignment" } }, "additionalProperties": false }, "AetherDockerSettingConfiguration": { "type": "object", "properties": { "useDocker": { "type": "boolean", "nullable": true }, "sharedVolumes": { "type": "boolean", "nullable": true }, "shmSize": { "type": "string", "nullable": true }, "arguments": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "AetherEarlyTerminationPolicyType": { "enum": [ "Bandit", "MedianStopping", "TruncationSelection" ], "type": "string" }, "AetherEntityInterfaceDocumentation": { "type": "object", "properties": { "inputsDocumentation": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "outputsDocumentation": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "parametersDocumentation": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true } }, "additionalProperties": false }, "AetherEntityStatus": { "enum": [ "Active", "Deprecated", "Disabled" ], "type": "string" }, "AetherEntrySetting": { "type": "object", "properties": { "file": { "type": "string", "nullable": true }, "className": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherEnvironmentConfiguration": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true }, "useEnvironmentDefinition": { "type": "boolean" }, "environmentDefinitionString": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherEsCloudConfiguration": { "type": "object", "properties": { "enableOutputToFileBasedOnDataTypeId": { "type": "boolean", "nullable": true }, "amlComputePriorityInternal": { "$ref": "#/components/schemas/AetherPriorityConfiguration" }, "itpPriorityInternal": { "$ref": "#/components/schemas/AetherPriorityConfiguration" }, "singularityPriorityInternal": { "$ref": "#/components/schemas/AetherPriorityConfiguration" }, "environment": { "$ref": "#/components/schemas/AetherEnvironmentConfiguration" }, "hyperDriveConfiguration": { "$ref": "#/components/schemas/AetherHyperDriveConfiguration" }, "k8sConfig": { "$ref": "#/components/schemas/AetherK8sConfiguration" }, "resourceConfig": { "$ref": "#/components/schemas/AetherResourceConfiguration" }, "torchDistributedConfig": { "$ref": "#/components/schemas/AetherTorchDistributedConfiguration" }, "targetSelectorConfig": { "$ref": "#/components/schemas/AetherTargetSelectorConfiguration" }, "dockerConfig": { "$ref": "#/components/schemas/AetherDockerSettingConfiguration" }, "environmentVariables": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "maxRunDurationSeconds": { "type": "integer", "format": "int32", "nullable": true }, "identity": 
{ "$ref": "#/components/schemas/AetherIdentitySetting" }, "applicationEndpoints": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ApplicationEndpointConfiguration" }, "nullable": true }, "runConfig": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherExecutionEnvironment": { "enum": [ "ExeWorkerMachine", "DockerContainerWithoutNetwork", "DockerContainerWithNetwork", "HyperVWithoutNetwork", "HyperVWithNetwork" ], "type": "string" }, "AetherExecutionPhase": { "enum": [ "Execution", "Initialization", "Finalization" ], "type": "string" }, "AetherExportDataTask": { "type": "object", "properties": { "DataTransferSink": { "$ref": "#/components/schemas/AetherDataTransferSink" } }, "additionalProperties": false }, "AetherFeaturizationMode": { "enum": [ "Auto", "Custom", "Off" ], "type": "string" }, "AetherFeaturizationSettings": { "type": "object", "properties": { "mode": { "$ref": "#/components/schemas/AetherFeaturizationMode" }, "blockedTransformers": { "type": "array", "items": { "type": "string" }, "nullable": true }, "columnPurposes": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "dropColumns": { "type": "array", "items": { "type": "string" }, "nullable": true }, "transformerParams": { "type": "object", "additionalProperties": { "type": "array", "items": { "$ref": "#/components/schemas/AetherColumnTransformer" }, "nullable": true }, "nullable": true }, "datasetLanguage": { "type": "string", "nullable": true }, "enableDnnFeaturization": { "type": "boolean", "nullable": true } }, "additionalProperties": false }, "AetherFileBasedPathType": { "enum": [ "Unknown", "File", "Folder" ], "type": "string" }, "AetherFileSystem": { "type": "object", "properties": { "connection": { "type": "string", "nullable": true }, "path": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherForecastHorizon": { "type": "object", "properties": { "mode": { "$ref": "#/components/schemas/AetherForecastHorizonMode" }, "value": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "AetherForecastHorizonMode": { "enum": [ "Auto", "Custom" ], "type": "string" }, "AetherForecastingSettings": { "type": "object", "properties": { "countryOrRegionForHolidays": { "type": "string", "nullable": true }, "timeColumnName": { "type": "string", "nullable": true }, "targetLags": { "$ref": "#/components/schemas/AetherTargetLags" }, "targetRollingWindowSize": { "$ref": "#/components/schemas/AetherTargetRollingWindowSize" }, "forecastHorizon": { "$ref": "#/components/schemas/AetherForecastHorizon" }, "timeSeriesIdColumnNames": { "type": "array", "items": { "type": "string" }, "nullable": true }, "frequency": { "type": "string", "nullable": true }, "featureLags": { "type": "string", "nullable": true }, "seasonality": { "$ref": "#/components/schemas/AetherSeasonality" }, "shortSeriesHandlingConfig": { "$ref": "#/components/schemas/AetherShortSeriesHandlingConfiguration" }, "useStl": { "$ref": "#/components/schemas/AetherUseStl" }, "targetAggregateFunction": { "$ref": "#/components/schemas/AetherTargetAggregationFunction" }, "cvStepSize": { "type": "integer", "format": "int32", "nullable": true }, "featuresUnknownAtForecastTime": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "AetherGeneralSettings": { "type": "object", "properties": { "primaryMetric": { "$ref": "#/components/schemas/AetherPrimaryMetrics" }, "taskType": { 
"$ref": "#/components/schemas/AetherTaskType" }, "logVerbosity": { "$ref": "#/components/schemas/AetherLogVerbosity" } }, "additionalProperties": false }, "AetherGlobsOptions": { "type": "object", "properties": { "globPatterns": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "AetherGraphControlNode": { "type": "object", "properties": { "id": { "type": "string", "nullable": true }, "controlType": { "$ref": "#/components/schemas/AetherControlType" }, "controlParameter": { "$ref": "#/components/schemas/AetherParameterAssignment" }, "runAttribution": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherGraphControlReferenceNode": { "type": "object", "properties": { "id": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "comment": { "type": "string", "nullable": true }, "controlFlowType": { "$ref": "#/components/schemas/AetherControlFlowType" }, "referenceNodeId": { "type": "string", "nullable": true }, "doWhileControlFlowInfo": { "$ref": "#/components/schemas/AetherDoWhileControlFlowInfo" }, "parallelForControlFlowInfo": { "$ref": "#/components/schemas/AetherParallelForControlFlowInfo" }, "runAttribution": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherGraphDatasetNode": { "type": "object", "properties": { "id": { "type": "string", "nullable": true }, "datasetId": { "type": "string", "nullable": true }, "dataPathParameterName": { "type": "string", "nullable": true }, "dataSetDefinition": { "$ref": "#/components/schemas/AetherDataSetDefinition" } }, "additionalProperties": false }, "AetherGraphEdge": { "type": "object", "properties": { "sourceOutputPort": { "$ref": "#/components/schemas/AetherPortInfo" }, "destinationInputPort": { "$ref": "#/components/schemas/AetherPortInfo" } }, "additionalProperties": false }, "AetherGraphEntity": { "type": "object", "properties": { "moduleNodes": { "type": "array", "items": { "$ref": "#/components/schemas/AetherGraphModuleNode" }, "nullable": true }, "datasetNodes": { "type": "array", "items": { "$ref": "#/components/schemas/AetherGraphDatasetNode" }, "nullable": true }, "subGraphNodes": { "type": "array", "items": { "$ref": "#/components/schemas/AetherGraphReferenceNode" }, "nullable": true }, "controlReferenceNodes": { "type": "array", "items": { "$ref": "#/components/schemas/AetherGraphControlReferenceNode" }, "nullable": true }, "controlNodes": { "type": "array", "items": { "$ref": "#/components/schemas/AetherGraphControlNode" }, "nullable": true }, "edges": { "type": "array", "items": { "$ref": "#/components/schemas/AetherGraphEdge" }, "nullable": true }, "defaultCompute": { "$ref": "#/components/schemas/AetherComputeSetting" }, "defaultDatastore": { "$ref": "#/components/schemas/AetherDatastoreSetting" }, "defaultCloudPriority": { "$ref": "#/components/schemas/AetherCloudPrioritySetting" }, "parentSubGraphModuleIds": { "type": "array", "items": { "type": "string" }, "nullable": true }, "id": { "type": "string", "nullable": true }, "workspaceId": { "type": "string", "nullable": true }, "etag": { "type": "string", "nullable": true }, "tags": { "type": "array", "items": { "type": "string" }, "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" }, "entityStatus": { "$ref": "#/components/schemas/AetherEntityStatus" } }, "additionalProperties": false }, "AetherGraphModuleNode": { "type": "object", "properties": 
{ "cloudPriority": { "type": "integer", "format": "int32" }, "defaultDataRetentionHint": { "type": "integer", "format": "int32", "nullable": true }, "complianceCluster": { "type": "string", "nullable": true }, "euclidWorkspaceId": { "type": "string", "nullable": true }, "attachedModules": { "type": "array", "items": { "type": "string" }, "nullable": true }, "acceptableMachineClusters": { "type": "array", "items": { "type": "string" }, "nullable": true }, "customDataLocationId": { "type": "string", "nullable": true }, "alertTimeoutDuration": { "type": "string", "format": "date-span", "nullable": true }, "runconfig": { "type": "string", "nullable": true }, "id": { "type": "string", "nullable": true }, "moduleId": { "type": "string", "nullable": true }, "comment": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "moduleParameters": { "type": "array", "items": { "$ref": "#/components/schemas/AetherParameterAssignment" }, "nullable": true }, "moduleMetadataParameters": { "type": "array", "items": { "$ref": "#/components/schemas/AetherParameterAssignment" }, "nullable": true }, "moduleOutputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/AetherOutputSetting" }, "nullable": true }, "moduleInputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/AetherInputSetting" }, "nullable": true }, "useGraphDefaultCompute": { "type": "boolean" }, "useGraphDefaultDatastore": { "type": "boolean" }, "regenerateOutput": { "type": "boolean" }, "controlInputs": { "type": "array", "items": { "$ref": "#/components/schemas/AetherControlInput" }, "nullable": true }, "cloudSettings": { "$ref": "#/components/schemas/AetherCloudSettings" }, "executionPhase": { "$ref": "#/components/schemas/AetherExecutionPhase" }, "runAttribution": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherGraphReferenceNode": { "type": "object", "properties": { "graphId": { "type": "string", "nullable": true }, "defaultCompute": { "$ref": "#/components/schemas/AetherComputeSetting" }, "defaultDatastore": { "$ref": "#/components/schemas/AetherDatastoreSetting" }, "id": { "type": "string", "nullable": true }, "moduleId": { "type": "string", "nullable": true }, "comment": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "moduleParameters": { "type": "array", "items": { "$ref": "#/components/schemas/AetherParameterAssignment" }, "nullable": true }, "moduleMetadataParameters": { "type": "array", "items": { "$ref": "#/components/schemas/AetherParameterAssignment" }, "nullable": true }, "moduleOutputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/AetherOutputSetting" }, "nullable": true }, "moduleInputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/AetherInputSetting" }, "nullable": true }, "useGraphDefaultCompute": { "type": "boolean" }, "useGraphDefaultDatastore": { "type": "boolean" }, "regenerateOutput": { "type": "boolean" }, "controlInputs": { "type": "array", "items": { "$ref": "#/components/schemas/AetherControlInput" }, "nullable": true }, "cloudSettings": { "$ref": "#/components/schemas/AetherCloudSettings" }, "executionPhase": { "$ref": "#/components/schemas/AetherExecutionPhase" }, "runAttribution": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherHdfsReference": { "type": "object", "properties": { "amlDataStoreName": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true } }, 
"additionalProperties": false }, "AetherHdiClusterComputeInfo": { "type": "object", "properties": { "address": { "type": "string", "nullable": true }, "username": { "type": "string", "nullable": true }, "password": { "type": "string", "nullable": true }, "privateKey": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherHdiRunConfiguration": { "type": "object", "properties": { "file": { "type": "string", "nullable": true }, "className": { "type": "string", "nullable": true }, "files": { "type": "array", "items": { "type": "string" }, "nullable": true }, "archives": { "type": "array", "items": { "type": "string" }, "nullable": true }, "jars": { "type": "array", "items": { "type": "string" }, "nullable": true }, "pyFiles": { "type": "array", "items": { "type": "string" }, "nullable": true }, "computeName": { "type": "string", "nullable": true }, "queue": { "type": "string", "nullable": true }, "driverMemory": { "type": "string", "nullable": true }, "driverCores": { "type": "integer", "format": "int32", "nullable": true }, "executorMemory": { "type": "string", "nullable": true }, "executorCores": { "type": "integer", "format": "int32", "nullable": true }, "numberExecutors": { "type": "integer", "format": "int32", "nullable": true }, "conf": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "name": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherHyperDriveConfiguration": { "type": "object", "properties": { "hyperDriveRunConfig": { "type": "string", "nullable": true }, "primaryMetricGoal": { "type": "string", "nullable": true }, "primaryMetricName": { "type": "string", "nullable": true }, "arguments": { "type": "array", "items": { "$ref": "#/components/schemas/AetherArgumentAssignment" }, "nullable": true } }, "additionalProperties": false }, "AetherIdentitySetting": { "type": "object", "properties": { "type": { "$ref": "#/components/schemas/AetherIdentityType" }, "clientId": { "type": "string", "format": "uuid", "nullable": true }, "objectId": { "type": "string", "format": "uuid", "nullable": true }, "msiResourceId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherIdentityType": { "enum": [ "UserIdentity", "Managed", "AMLToken" ], "type": "string" }, "AetherImportDataTask": { "type": "object", "properties": { "DataTransferSource": { "$ref": "#/components/schemas/AetherDataTransferSource" } }, "additionalProperties": false }, "AetherInputSetting": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "dataStoreMode": { "$ref": "#/components/schemas/AetherDataStoreMode" }, "pathOnCompute": { "type": "string", "nullable": true }, "options": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "additionalTransformations": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherInteractiveConfig": { "type": "object", "properties": { "isSSHEnabled": { "type": "boolean", "nullable": true }, "sshPublicKey": { "type": "string", "nullable": true }, "isIPythonEnabled": { "type": "boolean", "nullable": true }, "isTensorBoardEnabled": { "type": "boolean", "nullable": true }, "interactivePort": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "AetherK8sConfiguration": { "type": "object", "properties": { "maxRetryCount": { "type": "integer", "format": "int32", "nullable": true }, 
"resourceConfiguration": { "$ref": "#/components/schemas/AetherResourceConfig" }, "priorityConfiguration": { "$ref": "#/components/schemas/AetherPriorityConfig" }, "interactiveConfiguration": { "$ref": "#/components/schemas/AetherInteractiveConfig" } }, "additionalProperties": false }, "AetherLegacyDataPath": { "type": "object", "properties": { "dataStoreName": { "type": "string", "nullable": true }, "dataStoreMode": { "$ref": "#/components/schemas/AetherDataStoreMode" }, "relativePath": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherLimitSettings": { "type": "object", "properties": { "maxTrials": { "type": "integer", "format": "int32", "nullable": true }, "timeout": { "type": "string", "format": "date-span", "nullable": true }, "trialTimeout": { "type": "string", "format": "date-span", "nullable": true }, "maxConcurrentTrials": { "type": "integer", "format": "int32", "nullable": true }, "maxCoresPerTrial": { "type": "integer", "format": "int32", "nullable": true }, "exitScore": { "type": "number", "format": "double", "nullable": true }, "enableEarlyTermination": { "type": "boolean", "nullable": true }, "maxNodes": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "AetherLogVerbosity": { "enum": [ "NotSet", "Debug", "Info", "Warning", "Error", "Critical" ], "type": "string" }, "AetherMlcComputeInfo": { "type": "object", "properties": { "mlcComputeType": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherModuleDeploymentSource": { "enum": [ "Client", "AutoDeployment", "Vsts" ], "type": "string" }, "AetherModuleEntity": { "type": "object", "properties": { "lastUpdatedBy": { "$ref": "#/components/schemas/AetherCreatedBy" }, "displayName": { "type": "string", "nullable": true }, "moduleExecutionType": { "type": "string", "nullable": true }, "moduleType": { "$ref": "#/components/schemas/AetherModuleType" }, "moduleTypeVersion": { "type": "string", "nullable": true }, "resourceRequirements": { "$ref": "#/components/schemas/AetherResourceModel" }, "machineCluster": { "type": "array", "items": { "type": "string" }, "nullable": true }, "defaultComplianceCluster": { "type": "string", "nullable": true }, "repositoryType": { "$ref": "#/components/schemas/AetherRepositoryType" }, "relativePathToSourceCode": { "type": "string", "nullable": true }, "commitId": { "type": "string", "nullable": true }, "codeReviewLink": { "type": "string", "nullable": true }, "unitTestsAvailable": { "type": "boolean" }, "isCompressed": { "type": "boolean" }, "executionEnvironment": { "$ref": "#/components/schemas/AetherExecutionEnvironment" }, "isOutputMarkupEnabled": { "type": "boolean" }, "dockerImageId": { "type": "string", "nullable": true }, "dockerImageReference": { "type": "string", "nullable": true }, "dockerImageSecurityGroups": { "type": "string", "nullable": true }, "extendedProperties": { "$ref": "#/components/schemas/AetherModuleExtendedProperties" }, "deploymentSource": { "$ref": "#/components/schemas/AetherModuleDeploymentSource" }, "deploymentSourceMetadata": { "type": "string", "nullable": true }, "identifierHash": { "type": "string", "nullable": true }, "identifierHashV2": { "type": "string", "nullable": true }, "kvTags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "createdBy": { 
"$ref": "#/components/schemas/AetherCreatedBy" }, "runconfig": { "type": "string", "nullable": true }, "cloudSettings": { "$ref": "#/components/schemas/AetherCloudSettings" }, "category": { "type": "string", "nullable": true }, "stepType": { "type": "string", "nullable": true }, "stage": { "type": "string", "nullable": true }, "uploadState": { "$ref": "#/components/schemas/AetherUploadState" }, "sourceCodeLocation": { "type": "string", "nullable": true }, "sizeInBytes": { "type": "integer", "format": "int64" }, "downloadLocation": { "type": "string", "nullable": true }, "dataLocation": { "$ref": "#/components/schemas/AetherDataLocation" }, "scriptingRuntimeId": { "type": "string", "nullable": true }, "interfaceDocumentation": { "$ref": "#/components/schemas/AetherEntityInterfaceDocumentation" }, "isEyesOn": { "type": "boolean" }, "complianceCluster": { "type": "string", "nullable": true }, "isDeterministic": { "type": "boolean" }, "informationUrl": { "type": "string", "nullable": true }, "isExperimentIdInParameters": { "type": "boolean" }, "interfaceString": { "type": "string", "nullable": true }, "defaultParameters": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "structuredInterface": { "$ref": "#/components/schemas/AetherStructuredInterface" }, "familyId": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "hash": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true }, "sequenceNumberInFamily": { "type": "integer", "format": "int32" }, "owner": { "type": "string", "nullable": true }, "azureTenantId": { "type": "string", "nullable": true }, "azureUserId": { "type": "string", "nullable": true }, "collaborators": { "type": "array", "items": { "type": "string" }, "nullable": true }, "id": { "type": "string", "nullable": true }, "workspaceId": { "type": "string", "nullable": true }, "etag": { "type": "string", "nullable": true }, "tags": { "type": "array", "items": { "type": "string" }, "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" }, "entityStatus": { "$ref": "#/components/schemas/AetherEntityStatus" } }, "additionalProperties": false }, "AetherModuleExtendedProperties": { "type": "object", "properties": { "autoDeployedArtifact": { "$ref": "#/components/schemas/AetherBuildArtifactInfo" }, "scriptNeedsApproval": { "type": "boolean" } }, "additionalProperties": false }, "AetherModuleHashVersion": { "enum": [ "IdentifierHash", "IdentifierHashV2" ], "type": "string" }, "AetherModuleType": { "enum": [ "None", "BatchInferencing" ], "type": "string" }, "AetherNCrossValidationMode": { "enum": [ "Auto", "Custom" ], "type": "string" }, "AetherNCrossValidations": { "type": "object", "properties": { "mode": { "$ref": "#/components/schemas/AetherNCrossValidationMode" }, "value": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "AetherOutputSetting": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "dataStoreName": { "type": "string", "nullable": true }, "DataStoreNameParameterAssignment": { "$ref": "#/components/schemas/AetherParameterAssignment" }, "dataStoreMode": { "$ref": "#/components/schemas/AetherDataStoreMode" }, "DataStoreModeParameterAssignment": { "$ref": "#/components/schemas/AetherParameterAssignment" }, "pathOnCompute": { "type": 
"string", "nullable": true }, "PathOnComputeParameterAssignment": { "$ref": "#/components/schemas/AetherParameterAssignment" }, "overwrite": { "type": "boolean" }, "dataReferenceName": { "type": "string", "nullable": true }, "webServicePort": { "type": "string", "nullable": true }, "datasetRegistration": { "$ref": "#/components/schemas/AetherDatasetRegistration" }, "datasetOutputOptions": { "$ref": "#/components/schemas/AetherDatasetOutputOptions" }, "AssetOutputSettings": { "$ref": "#/components/schemas/AetherAssetOutputSettings" }, "parameterName": { "type": "string", "nullable": true }, "AssetOutputSettingsParameterName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherParallelForControlFlowInfo": { "type": "object", "properties": { "parallelForItemsInput": { "$ref": "#/components/schemas/AetherParameterAssignment" } }, "additionalProperties": false }, "AetherParameterAssignment": { "type": "object", "properties": { "valueType": { "$ref": "#/components/schemas/AetherParameterValueType" }, "assignmentsToConcatenate": { "type": "array", "items": { "$ref": "#/components/schemas/AetherParameterAssignment" }, "nullable": true }, "dataPathAssignment": { "$ref": "#/components/schemas/AetherLegacyDataPath" }, "dataSetDefinitionValueAssignment": { "$ref": "#/components/schemas/AetherDataSetDefinitionValue" }, "name": { "type": "string", "nullable": true }, "value": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherParameterType": { "enum": [ "Int", "Double", "Bool", "String", "Undefined" ], "type": "string" }, "AetherParameterValueType": { "enum": [ "Literal", "GraphParameterName", "Concatenate", "Input", "DataPath", "DataSetDefinition" ], "type": "string" }, "AetherPhillyHdfsReference": { "type": "object", "properties": { "cluster": { "type": "string", "nullable": true }, "vc": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherPortInfo": { "type": "object", "properties": { "nodeId": { "type": "string", "nullable": true }, "portName": { "type": "string", "nullable": true }, "graphPortName": { "type": "string", "nullable": true }, "isParameter": { "type": "boolean" }, "webServicePort": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherPrimaryMetrics": { "enum": [ "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError", "NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou" ], "type": "string" }, "AetherPriorityConfig": { "type": "object", "properties": { "jobPriority": { "type": "integer", "format": "int32", "nullable": true }, "isPreemptible": { "type": "boolean", "nullable": true }, "nodeCountSet": { "type": "array", "items": { "type": "integer", "format": "int32" }, "nullable": true }, "scaleInterval": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "AetherPriorityConfiguration": { "type": "object", "properties": { "cloudPriority": { "type": "integer", "format": "int32", "nullable": true }, "stringTypePriority": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherRegisteredDataSetReference": { "type": "object", "properties": { "id": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true } }, 
"additionalProperties": false }, "AetherRemoteDockerComputeInfo": { "type": "object", "properties": { "address": { "type": "string", "nullable": true }, "username": { "type": "string", "nullable": true }, "password": { "type": "string", "nullable": true }, "privateKey": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherRepositoryType": { "enum": [ "None", "Other", "Git", "SourceDepot", "Cosmos" ], "type": "string" }, "AetherResourceAssignment": { "type": "object", "properties": { "attributes": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AetherResourceAttributeAssignment" }, "nullable": true } }, "additionalProperties": false }, "AetherResourceAttributeAssignment": { "type": "object", "properties": { "attribute": { "$ref": "#/components/schemas/AetherResourceAttributeDefinition" }, "operator": { "$ref": "#/components/schemas/AetherResourceOperator" }, "value": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherResourceAttributeDefinition": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "type": { "$ref": "#/components/schemas/AetherResourceValueType" }, "units": { "type": "string", "nullable": true }, "allowedOperators": { "uniqueItems": true, "type": "array", "items": { "$ref": "#/components/schemas/AetherResourceOperator" }, "nullable": true } }, "additionalProperties": false }, "AetherResourceConfig": { "type": "object", "properties": { "gpuCount": { "type": "integer", "format": "int32", "nullable": true }, "cpuCount": { "type": "integer", "format": "int32", "nullable": true }, "memoryRequestInGB": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "AetherResourceConfiguration": { "type": "object", "properties": { "instanceCount": { "type": "integer", "format": "int32", "nullable": true }, "instanceType": { "type": "string", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "nullable": true }, "nullable": true }, "locations": { "type": "array", "items": { "type": "string" }, "nullable": true }, "instancePriority": { "type": "string", "nullable": true }, "quotaEnforcementResourceId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherResourceModel": { "type": "object", "properties": { "resources": { "type": "array", "items": { "$ref": "#/components/schemas/AetherResourceAssignment" }, "nullable": true } }, "additionalProperties": false }, "AetherResourceOperator": { "enum": [ "Equal", "Contain", "GreaterOrEqual" ], "type": "string" }, "AetherResourceValueType": { "enum": [ "String", "Double" ], "type": "string" }, "AetherResourcesSetting": { "type": "object", "properties": { "instanceSize": { "type": "string", "nullable": true }, "sparkVersion": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherSamplingAlgorithmType": { "enum": [ "Random", "Grid", "Bayesian" ], "type": "string" }, "AetherSavedDataSetReference": { "type": "object", "properties": { "id": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherScopeCloudConfiguration": { "type": "object", "properties": { "inputPathSuffixes": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AetherArgumentAssignment" }, "description": "This is a dictionary", "nullable": true }, "outputPathSuffixes": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AetherArgumentAssignment" }, "description": "This is a 
dictionary", "nullable": true }, "userAlias": { "type": "string", "nullable": true }, "tokens": { "type": "integer", "format": "int32", "nullable": true }, "autoToken": { "type": "integer", "format": "int32", "nullable": true }, "vcp": { "type": "number", "format": "float", "nullable": true } }, "additionalProperties": false }, "AetherSeasonality": { "type": "object", "properties": { "mode": { "$ref": "#/components/schemas/AetherSeasonalityMode" }, "value": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "AetherSeasonalityMode": { "enum": [ "Auto", "Custom" ], "type": "string" }, "AetherShortSeriesHandlingConfiguration": { "enum": [ "Auto", "Pad", "Drop" ], "type": "string" }, "AetherSqlDataPath": { "type": "object", "properties": { "sqlTableName": { "type": "string", "nullable": true }, "sqlQuery": { "type": "string", "nullable": true }, "sqlStoredProcedureName": { "type": "string", "nullable": true }, "sqlStoredProcedureParams": { "type": "array", "items": { "$ref": "#/components/schemas/AetherStoredProcedureParameter" }, "nullable": true } }, "additionalProperties": false }, "AetherStackEnsembleSettings": { "type": "object", "properties": { "stackMetaLearnerType": { "$ref": "#/components/schemas/AetherStackMetaLearnerType" }, "stackMetaLearnerTrainPercentage": { "type": "number", "format": "double", "nullable": true }, "stackMetaLearnerKWargs": { "nullable": true } }, "additionalProperties": false }, "AetherStackMetaLearnerType": { "enum": [ "None", "LogisticRegression", "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV", "LightGBMRegressor", "LinearRegression" ], "type": "string" }, "AetherStoredProcedureParameter": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "value": { "type": "string", "nullable": true }, "type": { "$ref": "#/components/schemas/AetherStoredProcedureParameterType" } }, "additionalProperties": false }, "AetherStoredProcedureParameterType": { "enum": [ "String", "Int", "Decimal", "Guid", "Boolean", "Date" ], "type": "string" }, "AetherStructuredInterface": { "type": "object", "properties": { "commandLinePattern": { "type": "string", "nullable": true }, "inputs": { "type": "array", "items": { "$ref": "#/components/schemas/AetherStructuredInterfaceInput" }, "nullable": true }, "outputs": { "type": "array", "items": { "$ref": "#/components/schemas/AetherStructuredInterfaceOutput" }, "nullable": true }, "controlOutputs": { "type": "array", "items": { "$ref": "#/components/schemas/AetherControlOutput" }, "nullable": true }, "parameters": { "type": "array", "items": { "$ref": "#/components/schemas/AetherStructuredInterfaceParameter" }, "nullable": true }, "metadataParameters": { "type": "array", "items": { "$ref": "#/components/schemas/AetherStructuredInterfaceParameter" }, "nullable": true }, "arguments": { "type": "array", "items": { "$ref": "#/components/schemas/AetherArgumentAssignment" }, "nullable": true } }, "additionalProperties": false }, "AetherStructuredInterfaceInput": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "label": { "type": "string", "nullable": true }, "dataTypeIdsList": { "type": "array", "items": { "type": "string" }, "nullable": true }, "isOptional": { "type": "boolean" }, "description": { "type": "string", "nullable": true }, "skipProcessing": { "type": "boolean" }, "isResource": { "type": "boolean" }, "dataStoreMode": { "$ref": "#/components/schemas/AetherDataStoreMode" }, "pathOnCompute": { "type": "string", "nullable": 
true }, "overwrite": { "type": "boolean" }, "dataReferenceName": { "type": "string", "nullable": true }, "datasetTypes": { "uniqueItems": true, "type": "array", "items": { "$ref": "#/components/schemas/AetherDatasetType" }, "nullable": true }, "additionalTransformations": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherStructuredInterfaceOutput": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "label": { "type": "string", "nullable": true }, "dataTypeId": { "type": "string", "nullable": true }, "passThroughDataTypeInputName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "skipProcessing": { "type": "boolean" }, "isArtifact": { "type": "boolean" }, "dataStoreName": { "type": "string", "nullable": true }, "dataStoreMode": { "$ref": "#/components/schemas/AetherDataStoreMode" }, "pathOnCompute": { "type": "string", "nullable": true }, "overwrite": { "type": "boolean" }, "dataReferenceName": { "type": "string", "nullable": true }, "trainingOutput": { "$ref": "#/components/schemas/AetherTrainingOutput" }, "datasetOutput": { "$ref": "#/components/schemas/AetherDatasetOutput" }, "AssetOutputSettings": { "$ref": "#/components/schemas/AetherAssetOutputSettings" }, "earlyAvailable": { "type": "boolean" } }, "additionalProperties": false }, "AetherStructuredInterfaceParameter": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "label": { "type": "string", "nullable": true }, "parameterType": { "$ref": "#/components/schemas/AetherParameterType" }, "isOptional": { "type": "boolean" }, "defaultValue": { "type": "string", "nullable": true }, "lowerBound": { "type": "string", "nullable": true }, "upperBound": { "type": "string", "nullable": true }, "enumValues": { "type": "array", "items": { "type": "string" }, "nullable": true }, "enumValuesToArgumentStrings": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "description": { "type": "string", "nullable": true }, "setEnvironmentVariable": { "type": "boolean" }, "environmentVariableOverride": { "type": "string", "nullable": true }, "enabledByParameterName": { "type": "string", "nullable": true }, "enabledByParameterValues": { "type": "array", "items": { "type": "string" }, "nullable": true }, "uiHint": { "$ref": "#/components/schemas/AetherUIParameterHint" }, "groupNames": { "type": "array", "items": { "type": "string" }, "nullable": true }, "argumentName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherSubGraphConfiguration": { "type": "object", "properties": { "graphId": { "type": "string", "nullable": true }, "graphDraftId": { "type": "string", "nullable": true }, "defaultComputeInternal": { "$ref": "#/components/schemas/AetherComputeSetting" }, "defaultDatastoreInternal": { "$ref": "#/components/schemas/AetherDatastoreSetting" }, "DefaultCloudPriority": { "$ref": "#/components/schemas/AetherCloudPrioritySetting" }, "UserAlias": { "type": "string", "nullable": true }, "IsDynamic": { "type": "boolean", "default": false, "nullable": true } }, "additionalProperties": false }, "AetherSweepEarlyTerminationPolicy": { "type": "object", "properties": { "policyType": { "$ref": "#/components/schemas/AetherEarlyTerminationPolicyType" }, "evaluationInterval": { "type": "integer", "format": "int32", "nullable": true }, "delayEvaluation": { "type": "integer", "format": "int32", "nullable": true }, 
"slackFactor": { "type": "number", "format": "float", "nullable": true }, "slackAmount": { "type": "number", "format": "float", "nullable": true }, "truncationPercentage": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "AetherSweepSettings": { "type": "object", "properties": { "limits": { "$ref": "#/components/schemas/AetherSweepSettingsLimits" }, "searchSpace": { "type": "array", "items": { "type": "object", "additionalProperties": { "type": "string" } }, "nullable": true }, "samplingAlgorithm": { "$ref": "#/components/schemas/AetherSamplingAlgorithmType" }, "earlyTermination": { "$ref": "#/components/schemas/AetherSweepEarlyTerminationPolicy" } }, "additionalProperties": false }, "AetherSweepSettingsLimits": { "type": "object", "properties": { "maxTotalTrials": { "type": "integer", "format": "int32", "nullable": true }, "maxConcurrentTrials": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "AetherTabularTrainingMode": { "enum": [ "Distributed", "NonDistributed", "Auto" ], "type": "string" }, "AetherTargetAggregationFunction": { "enum": [ "Sum", "Max", "Min", "Mean" ], "type": "string" }, "AetherTargetLags": { "type": "object", "properties": { "mode": { "$ref": "#/components/schemas/AetherTargetLagsMode" }, "values": { "type": "array", "items": { "type": "integer", "format": "int32" }, "nullable": true } }, "additionalProperties": false }, "AetherTargetLagsMode": { "enum": [ "Auto", "Custom" ], "type": "string" }, "AetherTargetRollingWindowSize": { "type": "object", "properties": { "mode": { "$ref": "#/components/schemas/AetherTargetRollingWindowSizeMode" }, "value": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "AetherTargetRollingWindowSizeMode": { "enum": [ "Auto", "Custom" ], "type": "string" }, "AetherTargetSelectorConfiguration": { "type": "object", "properties": { "lowPriorityVMTolerant": { "type": "boolean" }, "clusterBlockList": { "type": "array", "items": { "type": "string" }, "nullable": true }, "computeType": { "type": "string", "nullable": true }, "instanceType": { "type": "array", "items": { "type": "string" }, "nullable": true }, "instanceTypes": { "type": "array", "items": { "type": "string" }, "nullable": true }, "myResourceOnly": { "type": "boolean" }, "planId": { "type": "string", "nullable": true }, "planRegionId": { "type": "string", "nullable": true }, "region": { "type": "array", "items": { "type": "string" }, "nullable": true }, "regions": { "type": "array", "items": { "type": "string" }, "nullable": true }, "vcBlockList": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "AetherTaskType": { "enum": [ "Classification", "Regression", "Forecasting", "ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER", "TextClassificationMultilabel" ], "type": "string" }, "AetherTestDataSettings": { "type": "object", "properties": { "testDataSize": { "type": "number", "format": "double", "nullable": true } }, "additionalProperties": false }, "AetherTorchDistributedConfiguration": { "type": "object", "properties": { "processCountPerNode": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "AetherTrainingOutput": { "type": "object", "properties": { "trainingOutputType": { "$ref": "#/components/schemas/AetherTrainingOutputType" }, "iteration": { "type": 
"integer", "format": "int32", "nullable": true }, "metric": { "type": "string", "nullable": true }, "modelFile": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherTrainingOutputType": { "enum": [ "Metrics", "Model" ], "type": "string" }, "AetherTrainingSettings": { "type": "object", "properties": { "blockListModels": { "type": "array", "items": { "type": "string" }, "nullable": true }, "allowListModels": { "type": "array", "items": { "type": "string" }, "nullable": true }, "enableDnnTraining": { "type": "boolean", "nullable": true }, "enableOnnxCompatibleModels": { "type": "boolean", "nullable": true }, "stackEnsembleSettings": { "$ref": "#/components/schemas/AetherStackEnsembleSettings" }, "enableStackEnsemble": { "type": "boolean", "nullable": true }, "enableVoteEnsemble": { "type": "boolean", "nullable": true }, "ensembleModelDownloadTimeout": { "type": "string", "format": "date-span", "nullable": true }, "enableModelExplainability": { "type": "boolean", "nullable": true }, "trainingMode": { "$ref": "#/components/schemas/AetherTabularTrainingMode" } }, "additionalProperties": false }, "AetherUIAzureOpenAIDeploymentNameSelector": { "type": "object", "properties": { "Capabilities": { "$ref": "#/components/schemas/AetherUIAzureOpenAIModelCapabilities" } }, "additionalProperties": false }, "AetherUIAzureOpenAIModelCapabilities": { "type": "object", "properties": { "Completion": { "type": "boolean", "nullable": true }, "ChatCompletion": { "type": "boolean", "nullable": true }, "Embeddings": { "type": "boolean", "nullable": true } }, "additionalProperties": false }, "AetherUIColumnPicker": { "type": "object", "properties": { "columnPickerFor": { "type": "string", "nullable": true }, "columnSelectionCategories": { "type": "array", "items": { "type": "string" }, "nullable": true }, "singleColumnSelection": { "type": "boolean" } }, "additionalProperties": false }, "AetherUIJsonEditor": { "type": "object", "properties": { "jsonSchema": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherUIParameterHint": { "type": "object", "properties": { "uiWidgetType": { "$ref": "#/components/schemas/AetherUIWidgetTypeEnum" }, "columnPicker": { "$ref": "#/components/schemas/AetherUIColumnPicker" }, "uiScriptLanguage": { "$ref": "#/components/schemas/AetherUIScriptLanguageEnum" }, "jsonEditor": { "$ref": "#/components/schemas/AetherUIJsonEditor" }, "PromptFlowConnectionSelector": { "$ref": "#/components/schemas/AetherUIPromptFlowConnectionSelector" }, "AzureOpenAIDeploymentNameSelector": { "$ref": "#/components/schemas/AetherUIAzureOpenAIDeploymentNameSelector" }, "UxIgnore": { "type": "boolean" }, "Anonymous": { "type": "boolean" } }, "additionalProperties": false }, "AetherUIPromptFlowConnectionSelector": { "type": "object", "properties": { "PromptFlowConnectionType": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherUIScriptLanguageEnum": { "enum": [ "None", "Python", "R", "Json", "Sql" ], "type": "string" }, "AetherUIWidgetTypeEnum": { "enum": [ "Default", "Mode", "ColumnPicker", "Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection" ], "type": "string" }, "AetherUploadState": { "enum": [ "Uploading", "Completed", "Canceled", "Failed" ], "type": "string" }, "AetherUseStl": { "enum": [ "Season", 
"SeasonTrend" ], "type": "string" }, "AetherValidationDataSettings": { "type": "object", "properties": { "nCrossValidations": { "$ref": "#/components/schemas/AetherNCrossValidations" }, "validationDataSize": { "type": "number", "format": "double", "nullable": true }, "cvSplitColumnNames": { "type": "array", "items": { "type": "string" }, "nullable": true }, "validationType": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherVsoBuildArtifactInfo": { "type": "object", "properties": { "buildInfo": { "$ref": "#/components/schemas/AetherVsoBuildInfo" }, "downloadUrl": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AetherVsoBuildDefinitionInfo": { "type": "object", "properties": { "accountName": { "type": "string", "nullable": true }, "projectId": { "type": "string", "format": "uuid" }, "buildDefinitionId": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "AetherVsoBuildInfo": { "type": "object", "properties": { "definitionInfo": { "$ref": "#/components/schemas/AetherVsoBuildDefinitionInfo" }, "buildId": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "AmlDataset": { "type": "object", "properties": { "registeredDataSetReference": { "$ref": "#/components/schemas/RegisteredDataSetReference" }, "savedDataSetReference": { "$ref": "#/components/schemas/SavedDataSetReference" }, "additionalTransformations": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AmlK8sConfiguration": { "type": "object", "properties": { "resourceConfiguration": { "$ref": "#/components/schemas/ResourceConfiguration" }, "priorityConfiguration": { "$ref": "#/components/schemas/AmlK8sPriorityConfiguration" }, "interactiveConfiguration": { "$ref": "#/components/schemas/InteractiveConfiguration" } }, "additionalProperties": false }, "AmlK8sPriorityConfiguration": { "type": "object", "properties": { "jobPriority": { "type": "integer", "format": "int32", "nullable": true }, "isPreemptible": { "type": "boolean", "nullable": true }, "nodeCountSet": { "type": "array", "items": { "type": "integer", "format": "int32" }, "nullable": true }, "scaleInterval": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "AmlSparkCloudSetting": { "type": "object", "properties": { "entry": { "$ref": "#/components/schemas/EntrySetting" }, "files": { "type": "array", "items": { "type": "string" }, "nullable": true }, "archives": { "type": "array", "items": { "type": "string" }, "nullable": true }, "jars": { "type": "array", "items": { "type": "string" }, "nullable": true }, "pyFiles": { "type": "array", "items": { "type": "string" }, "nullable": true }, "driverMemory": { "type": "string", "nullable": true }, "driverCores": { "type": "integer", "format": "int32", "nullable": true }, "executorMemory": { "type": "string", "nullable": true }, "executorCores": { "type": "integer", "format": "int32", "nullable": true }, "numberExecutors": { "type": "integer", "format": "int32", "nullable": true }, "environmentAssetId": { "type": "string", "nullable": true }, "environmentVariables": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "inlineEnvironmentDefinitionString": { "type": "string", "nullable": true }, "conf": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "compute": { "type": "string", "nullable": true }, "resources": { "$ref": 
"#/components/schemas/ResourcesSetting" }, "identity": { "$ref": "#/components/schemas/IdentitySetting" } }, "additionalProperties": false }, "ApiAndParameters": { "type": "object", "properties": { "api": { "type": "string", "nullable": true }, "parameters": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowToolSettingParameter" }, "description": "This is a dictionary", "nullable": true }, "default_prompt": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ApplicationEndpointConfiguration": { "type": "object", "properties": { "type": { "$ref": "#/components/schemas/ApplicationEndpointType" }, "port": { "type": "integer", "format": "int32", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "nodes": { "$ref": "#/components/schemas/Nodes" } }, "additionalProperties": false }, "ApplicationEndpointType": { "enum": [ "Jupyter", "JupyterLab", "SSH", "TensorBoard", "VSCode", "Theia", "Grafana", "Custom", "RayDashboard" ], "type": "string" }, "ArgumentAssignment": { "type": "object", "properties": { "valueType": { "$ref": "#/components/schemas/ArgumentValueType" }, "value": { "type": "string", "nullable": true }, "nestedArgumentList": { "type": "array", "items": { "$ref": "#/components/schemas/ArgumentAssignment" }, "nullable": true }, "stringInterpolationArgumentList": { "type": "array", "items": { "$ref": "#/components/schemas/ArgumentAssignment" }, "nullable": true } }, "additionalProperties": false }, "ArgumentValueType": { "enum": [ "Literal", "Parameter", "Input", "Output", "NestedList", "StringInterpolationList" ], "type": "string" }, "Asset": { "type": "object", "properties": { "assetId": { "type": "string", "nullable": true }, "type": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AssetDefinition": { "type": "object", "properties": { "path": { "type": "string", "nullable": true }, "type": { "$ref": "#/components/schemas/AEVAAssetType" }, "assetId": { "type": "string", "nullable": true }, "serializedAssetId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AssetNameAndVersionIdentifier": { "type": "object", "properties": { "assetName": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true }, "feedName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AssetOutputSettings": { "type": "object", "properties": { "path": { "type": "string", "nullable": true }, "PathParameterAssignment": { "$ref": "#/components/schemas/ParameterAssignment" }, "type": { "$ref": "#/components/schemas/AEVAAssetType" }, "options": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "dataStoreMode": { "$ref": "#/components/schemas/AEVADataStoreMode" }, "name": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AssetOutputSettingsParameter": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "documentation": { "type": "string", "nullable": true }, "defaultValue": { "$ref": "#/components/schemas/AssetOutputSettings" } }, "additionalProperties": false }, "AssetPublishResult": { "type": "object", "properties": { "feedName": { "type": "string", "nullable": true }, "assetName": { "type": "string", "nullable": true }, "assetVersion": { "type": "string", "nullable": true }, "stepName": { 
"type": "string", "nullable": true }, "status": { "type": "string", "nullable": true }, "errorMessage": { "type": "string", "nullable": true }, "createdTime": { "type": "string", "format": "date-time" }, "lastUpdatedTime": { "type": "string", "format": "date-time", "nullable": true }, "regionalPublishResults": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AssetPublishSingleRegionResult" }, "nullable": true } }, "additionalProperties": false }, "AssetPublishSingleRegionResult": { "type": "object", "properties": { "stepName": { "type": "string", "nullable": true }, "status": { "type": "string", "nullable": true }, "errorMessage": { "type": "string", "nullable": true }, "lastUpdatedTime": { "type": "string", "format": "date-time" }, "totalSteps": { "type": "integer", "format": "int32" }, "finishedSteps": { "type": "integer", "format": "int32" }, "remainingSteps": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "AssetScopeTypes": { "enum": [ "Workspace", "Global", "All", "Feed" ], "type": "string" }, "AssetSourceType": { "enum": [ "Unknown", "Local", "GithubFile", "GithubFolder", "DevopsArtifactsZip" ], "type": "string" }, "AssetType": { "enum": [ "Component", "Model", "Environment", "Dataset", "DataStore", "SampleGraph", "FlowTool", "FlowToolSetting", "FlowConnection", "FlowSample", "FlowRuntimeSpec" ], "type": "string" }, "AssetTypeMetaInfo": { "type": "object", "properties": { "consumptionMode": { "$ref": "#/components/schemas/ConsumeMode" } }, "additionalProperties": false }, "AssetVersionPublishRequest": { "type": "object", "properties": { "assetType": { "$ref": "#/components/schemas/AssetType" }, "assetSourceType": { "$ref": "#/components/schemas/AssetSourceType" }, "yamlFile": { "type": "string", "nullable": true }, "sourceZipUrl": { "type": "string", "nullable": true }, "sourceZipFile": { "type": "string", "format": "binary", "nullable": true }, "feedName": { "type": "string", "nullable": true }, "setAsDefaultVersion": { "type": "boolean" }, "referencedAssets": { "type": "array", "items": { "$ref": "#/components/schemas/AssetNameAndVersionIdentifier" }, "nullable": true }, "flowFile": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AssignedUser": { "type": "object", "properties": { "objectId": { "type": "string", "nullable": true }, "tenantId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AuthKeys": { "type": "object", "properties": { "primaryKey": { "type": "string", "nullable": true }, "secondaryKey": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AutoClusterComputeSpecification": { "type": "object", "properties": { "instanceSize": { "type": "string", "nullable": true }, "instancePriority": { "type": "string", "nullable": true }, "osType": { "type": "string", "nullable": true }, "location": { "type": "string", "nullable": true }, "runtimeVersion": { "type": "string", "nullable": true }, "quotaEnforcementResourceId": { "type": "string", "nullable": true }, "modelComputeSpecificationId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AutoDeleteCondition": { "enum": [ "CreatedGreaterThan", "LastAccessedGreaterThan" ], "type": "string" }, "AutoDeleteSetting": { "type": "object", "properties": { "condition": { "$ref": "#/components/schemas/AutoDeleteCondition" }, "value": { "type": "string", "nullable": true } }, "additionalProperties": false }, 
"AutoFeaturizeConfiguration": { "type": "object", "properties": { "featurizationConfig": { "$ref": "#/components/schemas/FeaturizationSettings" } }, "additionalProperties": false }, "AutoMLComponentConfiguration": { "type": "object", "properties": { "autoTrainConfig": { "$ref": "#/components/schemas/AutoTrainConfiguration" }, "autoFeaturizeConfig": { "$ref": "#/components/schemas/AutoFeaturizeConfiguration" } }, "additionalProperties": false }, "AutoScaler": { "type": "object", "properties": { "autoscaleEnabled": { "type": "boolean", "nullable": true }, "minReplicas": { "type": "integer", "format": "int32", "nullable": true }, "maxReplicas": { "type": "integer", "format": "int32", "nullable": true }, "targetUtilization": { "type": "integer", "format": "int32", "nullable": true }, "refreshPeriodInSeconds": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "AutoTrainConfiguration": { "type": "object", "properties": { "generalSettings": { "$ref": "#/components/schemas/GeneralSettings" }, "limitSettings": { "$ref": "#/components/schemas/LimitSettings" }, "dataSettings": { "$ref": "#/components/schemas/DataSettings" }, "forecastingSettings": { "$ref": "#/components/schemas/ForecastingSettings" }, "trainingSettings": { "$ref": "#/components/schemas/TrainingSettings" }, "sweepSettings": { "$ref": "#/components/schemas/SweepSettings" }, "imageModelSettings": { "type": "object", "additionalProperties": { "nullable": true }, "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "computeConfiguration": { "$ref": "#/components/schemas/AEVAComputeConfiguration" }, "resourceConfigurtion": { "$ref": "#/components/schemas/AEVAResourceConfiguration" }, "environmentId": { "type": "string", "nullable": true }, "environmentVariables": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true } }, "additionalProperties": false }, "AutologgerSettings": { "type": "object", "properties": { "mlFlowAutologger": { "$ref": "#/components/schemas/MLFlowAutologgerState" } }, "additionalProperties": false }, "AvailabilityResponse": { "type": "object", "properties": { "isAvailable": { "type": "boolean" }, "error": { "$ref": "#/components/schemas/ErrorResponse" } }, "additionalProperties": false }, "AzureBlobReference": { "type": "object", "properties": { "container": { "type": "string", "nullable": true }, "sasToken": { "type": "string", "nullable": true }, "uri": { "type": "string", "nullable": true }, "account": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true }, "amlDataStoreName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AzureDataLakeGen2Reference": { "type": "object", "properties": { "fileSystemName": { "type": "string", "nullable": true }, "uri": { "type": "string", "nullable": true }, "account": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true }, "amlDataStoreName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AzureDataLakeReference": { "type": "object", "properties": { "tenant": { "type": "string", "nullable": true }, "subscription": { "type": "string", "nullable": true }, "resourceGroup": { "type": "string", "nullable": true }, "uri": { "type": "string", "nullable": true }, "account": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true }, 
"amlDataStoreName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AzureDatabaseReference": { "type": "object", "properties": { "tableName": { "type": "string", "nullable": true }, "sqlQuery": { "type": "string", "nullable": true }, "storedProcedureName": { "type": "string", "nullable": true }, "storedProcedureParameters": { "type": "array", "items": { "$ref": "#/components/schemas/StoredProcedureParameter" }, "nullable": true }, "amlDataStoreName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AzureFilesReference": { "type": "object", "properties": { "share": { "type": "string", "nullable": true }, "uri": { "type": "string", "nullable": true }, "account": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true }, "amlDataStoreName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AzureMLModuleVersionDescriptor": { "type": "object", "properties": { "moduleVersionId": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true } }, "additionalProperties": false }, "AzureOpenAIDeploymentDto": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "modelName": { "type": "string", "nullable": true }, "capabilities": { "$ref": "#/components/schemas/AzureOpenAIModelCapabilities" } }, "additionalProperties": false }, "AzureOpenAIModelCapabilities": { "type": "object", "properties": { "completion": { "type": "boolean", "nullable": true }, "chat_completion": { "type": "boolean", "nullable": true }, "embeddings": { "type": "boolean", "nullable": true } }, "additionalProperties": false }, "BatchAiComputeInfo": { "type": "object", "properties": { "batchAiSubscriptionId": { "type": "string", "nullable": true }, "batchAiResourceGroup": { "type": "string", "nullable": true }, "batchAiWorkspaceName": { "type": "string", "nullable": true }, "clusterName": { "type": "string", "nullable": true }, "nativeSharedDirectory": { "type": "string", "nullable": true } }, "additionalProperties": false }, "BatchDataInput": { "type": "object", "properties": { "dataUri": { "type": "string", "nullable": true }, "type": { "type": "string", "nullable": true } }, "additionalProperties": false }, "BatchExportComponentSpecResponse": { "type": "object", "properties": { "componentSpecMetaInfos": { "type": "array", "items": { "$ref": "#/components/schemas/ComponentSpecMetaInfo" }, "nullable": true }, "errors": { "type": "array", "items": { "$ref": "#/components/schemas/ErrorResponse" }, "nullable": true } }, "additionalProperties": false }, "BatchExportRawComponentResponse": { "type": "object", "properties": { "rawComponentDtos": { "type": "array", "items": { "$ref": "#/components/schemas/RawComponentDto" }, "nullable": true }, "errors": { "type": "array", "items": { "$ref": "#/components/schemas/ErrorResponse" }, "nullable": true } }, "additionalProperties": false }, "BatchGetComponentHashesRequest": { "type": "object", "properties": { "moduleHashVersion": { "$ref": "#/components/schemas/AetherModuleHashVersion" }, "moduleEntities": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AetherModuleEntity" }, "nullable": true } }, "additionalProperties": false }, "BatchGetComponentRequest": { "type": "object", "properties": { "versionIds": { "type": "array", "items": { "type": "string" }, "nullable": true }, "nameAndVersions": { "type": "array", "items": { "$ref": "#/components/schemas/ComponentNameMetaInfo" }, "nullable": true } 
}, "additionalProperties": false }, "Binding": { "type": "object", "properties": { "bindingType": { "$ref": "#/components/schemas/BindingType" } }, "additionalProperties": false }, "BindingType": { "enum": [ "Basic" ], "type": "string" }, "BuildContextLocationType": { "enum": [ "Git", "StorageAccount" ], "type": "string" }, "BulkTestDto": { "type": "object", "properties": { "bulkTestId": { "type": "string", "nullable": true }, "displayName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "runtime": { "type": "string", "nullable": true }, "createdBy": { "$ref": "#/components/schemas/SchemaContractsCreatedBy" }, "createdOn": { "type": "string", "format": "date-time", "nullable": true }, "evaluationCount": { "type": "integer", "format": "int32" }, "variantCount": { "type": "integer", "format": "int32" }, "flowSubmitRunSettings": { "$ref": "#/components/schemas/FlowSubmitRunSettings" }, "inputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowInputDefinition" }, "description": "This is a dictionary", "nullable": true }, "outputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowOutputDefinition" }, "description": "This is a dictionary", "nullable": true }, "batch_inputs": { "type": "array", "items": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary" }, "nullable": true }, "batchDataInput": { "$ref": "#/components/schemas/BatchDataInput" } }, "additionalProperties": false }, "CloudError": { "type": "object", "properties": { "code": { "type": "string", "nullable": true }, "message": { "type": "string", "nullable": true }, "target": { "type": "string", "nullable": true }, "details": { "type": "array", "items": { "$ref": "#/components/schemas/CloudError" }, "nullable": true, "readOnly": true }, "additionalInfo": { "type": "array", "items": { "$ref": "#/components/schemas/AdditionalErrorInfo" }, "nullable": true, "readOnly": true } }, "additionalProperties": false }, "CloudPrioritySetting": { "type": "object", "properties": { "scopePriority": { "$ref": "#/components/schemas/PriorityConfiguration" }, "AmlComputePriority": { "$ref": "#/components/schemas/PriorityConfiguration" }, "ItpPriority": { "$ref": "#/components/schemas/PriorityConfiguration" }, "SingularityPriority": { "$ref": "#/components/schemas/PriorityConfiguration" } }, "additionalProperties": false }, "CloudSettings": { "type": "object", "properties": { "linkedSettings": { "type": "array", "items": { "$ref": "#/components/schemas/ParameterAssignment" }, "nullable": true }, "priorityConfig": { "$ref": "#/components/schemas/PriorityConfiguration" }, "hdiRunConfig": { "$ref": "#/components/schemas/HdiRunConfiguration" }, "subGraphConfig": { "$ref": "#/components/schemas/SubGraphConfiguration" }, "autoMLComponentConfig": { "$ref": "#/components/schemas/AutoMLComponentConfiguration" }, "apCloudConfig": { "$ref": "#/components/schemas/APCloudConfiguration" }, "scopeCloudConfig": { "$ref": "#/components/schemas/ScopeCloudConfiguration" }, "esCloudConfig": { "$ref": "#/components/schemas/EsCloudConfiguration" }, "dataTransferCloudConfig": { "$ref": "#/components/schemas/DataTransferCloudConfiguration" }, "amlSparkCloudSetting": { "$ref": "#/components/schemas/AmlSparkCloudSetting" }, "dataTransferV2CloudSetting": { "$ref": 
"#/components/schemas/DataTransferV2CloudSetting" } }, "additionalProperties": false }, "ColumnTransformer": { "type": "object", "properties": { "fields": { "type": "array", "items": { "type": "string" }, "nullable": true }, "parameters": { "nullable": true } }, "additionalProperties": false }, "CommandJob": { "type": "object", "properties": { "jobType": { "$ref": "#/components/schemas/JobType" }, "codeId": { "type": "string", "nullable": true }, "command": { "minLength": 1, "type": "string", "nullable": true }, "environmentId": { "type": "string", "nullable": true }, "inputDataBindings": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/InputDataBinding" }, "nullable": true }, "outputDataBindings": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/OutputDataBinding" }, "nullable": true }, "distribution": { "$ref": "#/components/schemas/DistributionConfiguration" }, "environmentVariables": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "parameters": { "type": "object", "additionalProperties": { "nullable": true }, "nullable": true }, "autologgerSettings": { "$ref": "#/components/schemas/MfeInternalAutologgerSettings" }, "limits": { "$ref": "#/components/schemas/CommandJobLimits" }, "provisioningState": { "$ref": "#/components/schemas/JobProvisioningState" }, "parentJobName": { "type": "string", "nullable": true }, "displayName": { "type": "string", "nullable": true }, "experimentName": { "type": "string", "nullable": true }, "status": { "$ref": "#/components/schemas/JobStatus" }, "interactionEndpoints": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/JobEndpoint" }, "nullable": true }, "identity": { "$ref": "#/components/schemas/MfeInternalIdentityConfiguration" }, "compute": { "$ref": "#/components/schemas/ComputeConfiguration" }, "priority": { "type": "integer", "format": "int32", "nullable": true }, "output": { "$ref": "#/components/schemas/JobOutputArtifacts" }, "isArchived": { "type": "boolean" }, "schedule": { "$ref": "#/components/schemas/ScheduleBase" }, "componentId": { "type": "string", "nullable": true }, "notificationSetting": { "$ref": "#/components/schemas/NotificationSetting" }, "secretsConfiguration": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/MfeInternalSecretConfiguration" }, "nullable": true }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "CommandJobLimits": { "type": "object", "properties": { "jobLimitsType": { "$ref": "#/components/schemas/JobLimitsType" }, "timeout": { "type": "string", "format": "date-span", "nullable": true } }, "additionalProperties": false }, "CommandReturnCodeConfig": { "type": "object", "properties": { "returnCode": { "$ref": "#/components/schemas/SuccessfulCommandReturnCode" }, "successfulReturnCodes": { "type": "array", "items": { "type": "integer", "format": "int32" }, "nullable": true } }, "additionalProperties": false }, "Communicator": { "enum": [ "None", "ParameterServer", "Gloo", "Mpi", "Nccl", "ParallelTask" ], "type": "string" }, "ComponentConfiguration": { "type": "object", "properties": { "componentIdentifier": { "type": "string", "nullable": true } }, 
"additionalProperties": false }, "ComponentInput": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "optional": { "type": "boolean" }, "description": { "type": "string", "nullable": true }, "type": { "type": "string", "nullable": true }, "default": { "type": "string", "nullable": true }, "enum": { "type": "array", "items": { "type": "string" }, "nullable": true }, "min": { "type": "string", "nullable": true }, "max": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ComponentJob": { "type": "object", "properties": { "compute": { "$ref": "#/components/schemas/ComputeConfiguration" }, "componentId": { "type": "string", "nullable": true }, "inputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ComponentJobInput" }, "description": "This is a dictionary", "nullable": true }, "outputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ComponentJobOutput" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "ComponentJobInput": { "type": "object", "properties": { "data": { "$ref": "#/components/schemas/InputData" }, "inputBinding": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ComponentJobOutput": { "type": "object", "properties": { "data": { "$ref": "#/components/schemas/MfeInternalOutputData" }, "outputBinding": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ComponentNameAndDefaultVersion": { "type": "object", "properties": { "componentName": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true }, "feedName": { "type": "string", "nullable": true }, "registryName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ComponentNameMetaInfo": { "type": "object", "properties": { "feedName": { "type": "string", "nullable": true }, "componentName": { "type": "string", "nullable": true }, "componentVersion": { "type": "string", "nullable": true }, "registryName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ComponentOutput": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "type": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ComponentPreflightResult": { "type": "object", "properties": { "errorDetails": { "type": "array", "items": { "$ref": "#/components/schemas/RootError" }, "nullable": true } }, "additionalProperties": false }, "ComponentRegistrationTypeEnum": { "enum": [ "Normal", "AnonymousAmlModule", "AnonymousAmlModuleVersion", "ModuleEntityOnly" ], "type": "string" }, "ComponentSpecMetaInfo": { "type": "object", "properties": { "componentSpec": { "nullable": true }, "componentVersion": { "type": "string", "nullable": true }, "isAnonymous": { "type": "boolean" }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "componentName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "isArchived": { "type": "boolean" } }, "additionalProperties": false }, "ComponentType": { "enum": [ "Unknown", "CommandComponent", "Command" ], "type": "string" }, "ComponentUpdateRequest": { "type": "object", "properties": { 
"originalModuleEntity": { "$ref": "#/components/schemas/ModuleEntity" }, "updateModuleEntity": { "$ref": "#/components/schemas/ModuleEntity" }, "moduleName": { "type": "string", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "overwriteWithOriginalNameAndVersion": { "type": "boolean", "nullable": true }, "snapshotId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ComponentValidationRequest": { "type": "object", "properties": { "componentIdentifier": { "type": "string", "nullable": true }, "computeIdentity": { "$ref": "#/components/schemas/ComputeIdentityDto" }, "executionContextDto": { "$ref": "#/components/schemas/ExecutionContextDto" }, "environmentDefinition": { "$ref": "#/components/schemas/EnvironmentDefinitionDto" }, "dataPortDtos": { "type": "array", "items": { "$ref": "#/components/schemas/DataPortDto" }, "nullable": true } }, "additionalProperties": false }, "ComponentValidationResponse": { "type": "object", "properties": { "status": { "$ref": "#/components/schemas/ValidationStatus" }, "error": { "$ref": "#/components/schemas/ErrorResponse" } }, "additionalProperties": false }, "Compute": { "type": "object", "properties": { "target": { "type": "string", "nullable": true }, "targetType": { "type": "string", "nullable": true }, "vmSize": { "type": "string", "nullable": true }, "instanceType": { "type": "string", "nullable": true }, "instanceCount": { "type": "integer", "format": "int32", "nullable": true }, "gpuCount": { "type": "integer", "format": "int32", "nullable": true }, "priority": { "type": "string", "nullable": true }, "region": { "type": "string", "nullable": true }, "armId": { "type": "string", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "ComputeConfiguration": { "type": "object", "properties": { "target": { "type": "string", "nullable": true }, "instanceCount": { "type": "integer", "format": "int32", "nullable": true }, "maxInstanceCount": { "type": "integer", "format": "int32", "nullable": true }, "isLocal": { "type": "boolean" }, "location": { "type": "string", "nullable": true }, "isClusterless": { "type": "boolean" }, "instanceType": { "type": "string", "nullable": true }, "instancePriority": { "type": "string", "nullable": true }, "jobPriority": { "type": "integer", "format": "int32", "nullable": true }, "shmSize": { "type": "string", "nullable": true }, "dockerArgs": { "type": "string", "nullable": true }, "locations": { "type": "array", "items": { "type": "string" }, "nullable": true }, "properties": { "type": "object", "additionalProperties": { "nullable": true }, "nullable": true } }, "additionalProperties": false }, "ComputeContract": { "type": "object", "properties": { "id": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "type": { "type": "string", "nullable": true, "readOnly": true }, "location": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "identity": { "$ref": "#/components/schemas/ComputeIdentityContract" }, "properties": { "$ref": "#/components/schemas/ComputeProperties" } }, "additionalProperties": false }, "ComputeDetails": { "type": "object" }, "ComputeEnvironmentType": { "enum": [ "ACI", "AKS", 
"AMLCOMPUTE", "IOT", "AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN" ], "type": "string" }, "ComputeIdentityContract": { "type": "object", "properties": { "type": { "type": "string", "nullable": true }, "systemIdentityUrl": { "type": "string", "nullable": true }, "principalId": { "type": "string", "nullable": true }, "tenantId": { "type": "string", "nullable": true }, "clientId": { "type": "string", "nullable": true }, "clientSecretUrl": { "type": "string", "nullable": true }, "userAssignedIdentities": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ComputeRPUserAssignedIdentity" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "ComputeIdentityDto": { "type": "object", "properties": { "computeName": { "type": "string", "nullable": true }, "computeTargetType": { "$ref": "#/components/schemas/ComputeTargetType" }, "intellectualPropertyPublisher": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ComputeInfo": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "computeType": { "$ref": "#/components/schemas/ComputeEnvironmentType" }, "isSslEnabled": { "type": "boolean" }, "isGpuType": { "type": "boolean" }, "clusterPurpose": { "type": "string", "nullable": true }, "publicIpAddress": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ComputeProperties": { "required": [ "computeType" ], "type": "object", "properties": { "createdOn": { "type": "string", "format": "date-time" }, "modifiedOn": { "type": "string", "format": "date-time" }, "disableLocalAuth": { "type": "boolean" }, "description": { "type": "string", "nullable": true }, "resourceId": { "type": "string", "nullable": true }, "computeType": { "minLength": 1, "type": "string" }, "computeLocation": { "type": "string", "nullable": true }, "provisioningState": { "$ref": "#/components/schemas/ProvisioningState" }, "provisioningErrors": { "type": "array", "items": { "$ref": "#/components/schemas/ODataErrorResponse" }, "nullable": true }, "provisioningWarnings": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "isAttachedCompute": { "type": "boolean" }, "properties": { "$ref": "#/components/schemas/ComputeDetails" }, "status": { "$ref": "#/components/schemas/ComputeStatus" }, "warnings": { "type": "array", "items": { "$ref": "#/components/schemas/ComputeWarning" }, "nullable": true } }, "additionalProperties": false }, "ComputeRPUserAssignedIdentity": { "type": "object", "properties": { "principalId": { "type": "string", "nullable": true }, "tenantId": { "type": "string", "nullable": true }, "clientId": { "type": "string", "nullable": true }, "clientSecretUrl": { "type": "string", "nullable": true }, "resourceId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ComputeRequest": { "type": "object", "properties": { "nodeCount": { "type": "integer", "format": "int32", "nullable": true }, "gpuCount": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "ComputeSetting": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "computeType": { "$ref": "#/components/schemas/ComputeType" }, "batchAiComputeInfo": { "$ref": "#/components/schemas/BatchAiComputeInfo" }, "remoteDockerComputeInfo": { "$ref": "#/components/schemas/RemoteDockerComputeInfo" }, "hdiClusterComputeInfo": 
{ "$ref": "#/components/schemas/HdiClusterComputeInfo" }, "mlcComputeInfo": { "$ref": "#/components/schemas/MlcComputeInfo" }, "databricksComputeInfo": { "$ref": "#/components/schemas/DatabricksComputeInfo" } }, "additionalProperties": false }, "ComputeStatus": { "type": "object", "properties": { "isStatusAvailable": { "type": "boolean", "readOnly": true }, "detailedStatus": { "nullable": true }, "error": { "$ref": "#/components/schemas/ODataError" } }, "additionalProperties": false }, "ComputeStatusDetail": { "type": "object", "properties": { "provisioningState": { "type": "string", "nullable": true }, "provisioningErrorMessage": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ComputeTargetType": { "enum": [ "Local", "Remote", "HdiCluster", "ContainerInstance", "AmlCompute", "ComputeInstance", "Cmk8s", "SynapseSpark", "Kubernetes", "Aisc", "GlobalJobDispatcher", "Databricks", "MockedCompute" ], "type": "string" }, "ComputeType": { "enum": [ "BatchAi", "MLC", "HdiCluster", "RemoteDocker", "Databricks", "Aisc" ], "type": "string" }, "ComputeWarning": { "type": "object", "properties": { "title": { "type": "string", "nullable": true }, "message": { "type": "string", "nullable": true }, "code": { "type": "string", "nullable": true }, "severity": { "$ref": "#/components/schemas/SeverityLevel" } }, "additionalProperties": false }, "ConfigValueType": { "enum": [ "String", "Secret" ], "type": "string" }, "ConnectionCategory": { "enum": [ "PythonFeed", "ACR", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "AzureDataLakeGen2", "Redis", "ApiKey", "AzureOpenAI", "CognitiveSearch", "CognitiveService", "CustomKeys", "AzureBlob", "AzureOneLake", "CosmosDb", "CosmosDbMongoDbApi", "AzureDataExplorer", "AzureMariaDb", "AzureDatabricksDeltaLake", "AzureSqlMi", "AzureTableStorage", "AmazonRdsForOracle", "AmazonRdsForSqlServer", "AmazonRedshift", "Db2", "Drill", "GoogleBigQuery", "Greenplum", "Hbase", "Hive", "Impala", "Informix", "MariaDb", "MicrosoftAccess", "MySql", "Netezza", "Oracle", "Phoenix", "PostgreSql", "Presto", "SapOpenHub", "SapBw", "SapHana", "SapTable", "Spark", "SqlServer", "Sybase", "Teradata", "Vertica", "Cassandra", "Couchbase", "MongoDbV2", "MongoDbAtlas", "AmazonS3Compatible", "FileServer", "FtpServer", "GoogleCloudStorage", "Hdfs", "OracleCloudStorage", "Sftp", "GenericHttp", "ODataRest", "Odbc", "GenericRest", "AmazonMws", "Concur", "Dynamics", "DynamicsAx", "DynamicsCrm", "GoogleAdWords", "Hubspot", "Jira", "Magento", "Marketo", "Office365", "Eloqua", "Responsys", "OracleServiceCloud", "PayPal", "QuickBooks", "Salesforce", "SalesforceServiceCloud", "SalesforceMarketingCloud", "SapCloudForCustomer", "SapEcc", "ServiceNow", "SharePointOnlineList", "Shopify", "Square", "WebTable", "Xero", "Zoho", "GenericContainerRegistry" ], "type": "string" }, "ConnectionConfigSpec": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "displayName": { "type": "string", "nullable": true }, "configValueType": { "$ref": "#/components/schemas/ConfigValueType" }, "description": { "type": "string", "nullable": true }, "defaultValue": { "type": "string", "nullable": true }, "enumValues": { "type": "array", "items": { "type": "string" }, "nullable": true }, "isOptional": { "type": "boolean" } }, "additionalProperties": false }, "ConnectionDto": { "type": "object", "properties": { "connectionName": { "type": "string", "nullable": true }, "connectionType": { "$ref": 
"#/components/schemas/ConnectionType" }, "configs": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "customConfigs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/CustomConnectionConfig" }, "description": "This is a dictionary", "nullable": true }, "expiryTime": { "type": "string", "format": "date-time", "nullable": true }, "owner": { "$ref": "#/components/schemas/SchemaContractsCreatedBy" }, "createdDate": { "type": "string", "format": "date-time", "nullable": true }, "lastModifiedDate": { "type": "string", "format": "date-time", "nullable": true } }, "additionalProperties": false }, "ConnectionEntity": { "type": "object", "properties": { "connectionId": { "type": "string", "nullable": true }, "connectionName": { "type": "string", "nullable": true }, "connectionType": { "$ref": "#/components/schemas/ConnectionType" }, "connectionScope": { "$ref": "#/components/schemas/ConnectionScope" }, "configs": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "customConfigs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/CustomConnectionConfig" }, "description": "This is a dictionary", "nullable": true }, "expiryTime": { "type": "string", "format": "date-time", "nullable": true }, "secretName": { "type": "string", "nullable": true }, "owner": { "$ref": "#/components/schemas/SchemaContractsCreatedBy" }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "ConnectionOverrideSetting": { "type": "object", "properties": { "connectionSourceType": { "$ref": "#/components/schemas/ConnectionSourceType" }, "nodeName": { "type": "string", "nullable": true }, "nodeInputName": { "type": "string", "nullable": true }, "nodeDeploymentNameInput": { "type": "string", "nullable": true }, "nodeModelInput": { "type": "string", "nullable": true }, "connectionName": { "type": "string", "nullable": true }, "deploymentName": { "type": "string", "nullable": true }, "model": { "type": "string", "nullable": true }, "connectionTypes": { "type": "array", "items": { "$ref": "#/components/schemas/ConnectionType" }, "nullable": true }, "capabilities": { "$ref": "#/components/schemas/AzureOpenAIModelCapabilities" }, "modelEnum": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "ConnectionScope": { "enum": [ "User", "WorkspaceShared" ], "type": "string" }, "ConnectionSourceType": { "enum": [ "Node", "NodeInput" ], "type": "string" }, "ConnectionSpec": { "type": "object", "properties": { "connectionType": { "$ref": "#/components/schemas/ConnectionType" }, "configSpecs": { "type": "array", "items": { "$ref": "#/components/schemas/ConnectionConfigSpec" }, "nullable": true } }, "additionalProperties": false }, "ConnectionType": { "enum": [ "OpenAI", "AzureOpenAI", "Serp", "Bing", "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM", "Pinecone", "Qdrant", "Weaviate", "FormRecognizer" ], "type": "string" }, "ConsumeMode": { "enum": [ "Reference", "Copy", "CopyAndAutoUpgrade" ], "type": "string" }, "ContainerInstanceConfiguration": { "type": "object", "properties": { "region": { "type": "string", "nullable": true }, "cpuCores": { "type": "number", "format": "double" }, "memoryGb": { "type": "number", "format": "double" } }, 
"additionalProperties": false }, "ContainerRegistry": { "type": "object", "properties": { "address": { "type": "string", "nullable": true }, "username": { "type": "string", "nullable": true }, "password": { "type": "string", "nullable": true }, "credentialType": { "type": "string", "nullable": true }, "registryIdentity": { "$ref": "#/components/schemas/RegistryIdentity" } }, "additionalProperties": false }, "ContainerResourceRequirements": { "type": "object", "properties": { "cpu": { "type": "number", "format": "double", "nullable": true }, "cpuLimit": { "type": "number", "format": "double", "nullable": true }, "memoryInGB": { "type": "number", "format": "double", "nullable": true }, "memoryInGBLimit": { "type": "number", "format": "double", "nullable": true }, "gpuEnabled": { "type": "boolean", "nullable": true }, "gpu": { "type": "integer", "format": "int32", "nullable": true }, "fpga": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "ControlFlowType": { "enum": [ "None", "DoWhile", "ParallelFor" ], "type": "string" }, "ControlInput": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "defaultValue": { "$ref": "#/components/schemas/ControlInputValue" } }, "additionalProperties": false }, "ControlInputValue": { "enum": [ "None", "False", "True", "Skipped" ], "type": "string" }, "ControlOutput": { "type": "object", "properties": { "name": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ControlType": { "enum": [ "IfElse" ], "type": "string" }, "CopyDataTask": { "type": "object", "properties": { "DataCopyMode": { "$ref": "#/components/schemas/DataCopyMode" } }, "additionalProperties": false }, "CreateFlowFromSampleRequest": { "type": "object", "properties": { "flowName": { "type": "string", "nullable": true }, "sampleResourceId": { "type": "string", "nullable": true }, "flowDefinitionFilePath": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "isArchived": { "type": "boolean" } }, "additionalProperties": false }, "CreateFlowRequest": { "type": "object", "properties": { "flowName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "details": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "flow": { "$ref": "#/components/schemas/Flow" }, "flowDefinitionFilePath": { "type": "string", "nullable": true }, "flowType": { "$ref": "#/components/schemas/FlowType" }, "flowRunSettings": { "$ref": "#/components/schemas/FlowRunSettings" }, "isArchived": { "type": "boolean" }, "vmSize": { "type": "string", "nullable": true }, "maxIdleTimeSeconds": { "type": "integer", "format": "int64", "nullable": true }, "identity": { "type": "string", "nullable": true } }, "additionalProperties": false }, "CreateFlowRuntimeRequest": { "type": "object", "properties": { "runtimeType": { "$ref": "#/components/schemas/RuntimeType" }, "identity": { "$ref": "#/components/schemas/ManagedServiceIdentity" }, "instanceType": { "type": "string", "nullable": true }, "fromExistingEndpoint": { "type": "boolean" }, "fromExistingDeployment": { "type": "boolean" }, "endpointName": { "type": "string", "nullable": true }, "deploymentName": { "type": "string", "nullable": true }, "computeInstanceName": { "type": "string", "nullable": true }, "fromExistingCustomApp": { "type": "boolean" 
}, "customAppName": { "type": "string", "nullable": true }, "runtimeDescription": { "type": "string", "nullable": true }, "environment": { "type": "string", "nullable": true }, "instanceCount": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "CreateFlowSessionRequest": { "type": "object", "properties": { "pythonPipRequirements": { "type": "array", "items": { "type": "string" }, "nullable": true }, "baseImage": { "type": "string", "nullable": true }, "vmSize": { "type": "string", "nullable": true }, "maxIdleTimeSeconds": { "type": "integer", "format": "int64", "nullable": true }, "action": { "$ref": "#/components/schemas/SetupFlowSessionAction" }, "identity": { "type": "string", "nullable": true } }, "additionalProperties": false }, "CreateInferencePipelineRequest": { "type": "object", "properties": { "moduleNodeId": { "type": "string", "nullable": true }, "portName": { "type": "string", "nullable": true }, "trainingPipelineDraftName": { "type": "string", "nullable": true }, "trainingPipelineRunDisplayName": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "pipelineType": { "$ref": "#/components/schemas/PipelineType" }, "pipelineDraftMode": { "$ref": "#/components/schemas/PipelineDraftMode" }, "graphComponentsMode": { "$ref": "#/components/schemas/GraphComponentsMode" }, "subPipelinesInfo": { "$ref": "#/components/schemas/SubPipelinesInfo" }, "flattenedSubGraphs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/PipelineSubDraft" }, "nullable": true }, "pipelineParameters": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "dataPathAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/LegacyDataPath" }, "description": "This is a dictionary", "nullable": true }, "dataSetDefinitionValueAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/DataSetDefinitionValue" }, "description": "This is a dictionary", "nullable": true }, "assetOutputSettingsAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AssetOutputSettings" }, "description": "This is a dictionary", "nullable": true }, "graph": { "$ref": "#/components/schemas/GraphDraftEntity" }, "pipelineRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/RunSettingParameterAssignment" }, "nullable": true }, "moduleNodeRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNodeRunSetting" }, "nullable": true }, "moduleNodeUIInputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNodeUIInputSetting" }, "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "continueRunOnStepFailure": { "type": "boolean", "nullable": true }, "description": { "type": "string", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "enforceRerun": { "type": "boolean", "nullable": true }, "datasetAccessModes": { "$ref": "#/components/schemas/DatasetAccessModes" } }, "additionalProperties": false }, "CreateOrUpdateConnectionRequest": { "type": "object", "properties": { "connectionType": { "$ref": "#/components/schemas/ConnectionType" }, "connectionScope": { "$ref": "#/components/schemas/ConnectionScope" }, "configs": { 
"type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "customConfigs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/CustomConnectionConfig" }, "description": "This is a dictionary", "nullable": true }, "expiryTime": { "type": "string", "format": "date-time", "nullable": true } }, "additionalProperties": false }, "CreateOrUpdateConnectionRequestDto": { "type": "object", "properties": { "connectionType": { "$ref": "#/components/schemas/ConnectionType" }, "configs": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "customConfigs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/CustomConnectionConfig" }, "description": "This is a dictionary", "nullable": true }, "expiryTime": { "type": "string", "format": "date-time", "nullable": true } }, "additionalProperties": false }, "CreatePipelineDraftRequest": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "pipelineType": { "$ref": "#/components/schemas/PipelineType" }, "pipelineDraftMode": { "$ref": "#/components/schemas/PipelineDraftMode" }, "graphComponentsMode": { "$ref": "#/components/schemas/GraphComponentsMode" }, "subPipelinesInfo": { "$ref": "#/components/schemas/SubPipelinesInfo" }, "flattenedSubGraphs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/PipelineSubDraft" }, "nullable": true }, "pipelineParameters": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "dataPathAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/LegacyDataPath" }, "description": "This is a dictionary", "nullable": true }, "dataSetDefinitionValueAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/DataSetDefinitionValue" }, "description": "This is a dictionary", "nullable": true }, "assetOutputSettingsAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AssetOutputSettings" }, "description": "This is a dictionary", "nullable": true }, "graph": { "$ref": "#/components/schemas/GraphDraftEntity" }, "pipelineRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/RunSettingParameterAssignment" }, "nullable": true }, "moduleNodeRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNodeRunSetting" }, "nullable": true }, "moduleNodeUIInputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNodeUIInputSetting" }, "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "continueRunOnStepFailure": { "type": "boolean", "nullable": true }, "description": { "type": "string", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "enforceRerun": { "type": "boolean", "nullable": true }, "datasetAccessModes": { "$ref": "#/components/schemas/DatasetAccessModes" } }, "additionalProperties": false }, "CreatePipelineJobScheduleDto": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "pipelineJobName": { "type": "string", "nullable": true }, "pipelineJobRuntimeSettings": { "$ref": 
"#/components/schemas/PipelineJobRuntimeBasicSettings" }, "displayName": { "type": "string", "nullable": true }, "triggerType": { "$ref": "#/components/schemas/TriggerType" }, "recurrence": { "$ref": "#/components/schemas/Recurrence" }, "cron": { "$ref": "#/components/schemas/Cron" }, "status": { "$ref": "#/components/schemas/ScheduleStatus" }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "CreatePublishedPipelineRequest": { "type": "object", "properties": { "usePipelineEndpoint": { "type": "boolean" }, "pipelineName": { "type": "string", "nullable": true }, "pipelineDescription": { "type": "string", "nullable": true }, "useExistingPipelineEndpoint": { "type": "boolean" }, "pipelineEndpointName": { "type": "string", "nullable": true }, "pipelineEndpointDescription": { "type": "string", "nullable": true }, "setAsDefaultPipelineForEndpoint": { "type": "boolean" }, "stepTags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "experimentName": { "type": "string", "nullable": true }, "pipelineParameters": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "dataPathAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/LegacyDataPath" }, "description": "This is a dictionary", "nullable": true }, "dataSetDefinitionValueAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/DataSetDefinitionValue" }, "description": "This is a dictionary", "nullable": true }, "assetOutputSettingsAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AssetOutputSettings" }, "description": "This is a dictionary", "nullable": true }, "enableNotification": { "type": "boolean", "nullable": true }, "subPipelinesInfo": { "$ref": "#/components/schemas/SubPipelinesInfo" }, "displayName": { "type": "string", "nullable": true }, "runId": { "type": "string", "nullable": true }, "parentRunId": { "type": "string", "nullable": true }, "graph": { "$ref": "#/components/schemas/GraphDraftEntity" }, "pipelineRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/RunSettingParameterAssignment" }, "nullable": true }, "moduleNodeRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNodeRunSetting" }, "nullable": true }, "moduleNodeUIInputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNodeUIInputSetting" }, "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "continueRunOnStepFailure": { "type": "boolean", "nullable": true }, "description": { "type": "string", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "enforceRerun": { "type": "boolean", "nullable": true }, "datasetAccessModes": { "$ref": "#/components/schemas/DatasetAccessModes" } }, "additionalProperties": false }, "CreateRealTimeEndpointRequest": { "type": "object", "properties": { "name": { "type": "string", "nullable": true 
}, "computeInfo": { "$ref": "#/components/schemas/ComputeInfo" }, "description": { "type": "string", "nullable": true }, "linkedPipelineDraftId": { "type": "string", "nullable": true }, "linkedPipelineRunId": { "type": "string", "nullable": true }, "aksAdvanceSettings": { "$ref": "#/components/schemas/AKSAdvanceSettings" }, "aciAdvanceSettings": { "$ref": "#/components/schemas/ACIAdvanceSettings" }, "linkedTrainingPipelineRunId": { "type": "string", "nullable": true }, "linkedExperimentName": { "type": "string", "nullable": true }, "graphNodesRunIdMapping": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "workflow": { "$ref": "#/components/schemas/PipelineGraph" }, "inputs": { "type": "array", "items": { "$ref": "#/components/schemas/InputOutputPortMetadata" }, "nullable": true }, "outputs": { "type": "array", "items": { "$ref": "#/components/schemas/InputOutputPortMetadata" }, "nullable": true }, "exampleRequest": { "$ref": "#/components/schemas/ExampleRequest" }, "userStorageConnectionString": { "type": "string", "nullable": true }, "userStorageEndpointUri": { "type": "string", "format": "uri", "nullable": true }, "userStorageWorkspaceSaiToken": { "type": "string", "nullable": true }, "userStorageContainerName": { "type": "string", "nullable": true }, "pipelineRunId": { "type": "string", "nullable": true }, "rootPipelineRunId": { "type": "string", "nullable": true }, "experimentName": { "type": "string", "nullable": true }, "experimentId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "CreatedBy": { "type": "object", "properties": { "userObjectId": { "type": "string", "nullable": true }, "userTenantId": { "type": "string", "nullable": true }, "userName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "CreatedFromDto": { "type": "object", "properties": { "type": { "$ref": "#/components/schemas/CreatedFromType" }, "locationType": { "$ref": "#/components/schemas/CreatedFromLocationType" }, "location": { "type": "string", "nullable": true } }, "additionalProperties": false }, "CreatedFromLocationType": { "enum": [ "ArtifactId" ], "type": "string" }, "CreatedFromType": { "enum": [ "Notebook" ], "type": "string" }, "CreationContext": { "type": "object", "properties": { "createdTime": { "type": "string", "format": "date-time" }, "createdBy": { "$ref": "#/components/schemas/SchemaContractsCreatedBy" }, "creationSource": { "type": "string", "nullable": true } } }, "Cron": { "type": "object", "properties": { "expression": { "type": "string", "nullable": true }, "endTime": { "type": "string", "nullable": true }, "startTime": { "type": "string", "nullable": true }, "timeZone": { "type": "string", "nullable": true } }, "additionalProperties": false }, "CustomConnectionConfig": { "type": "object", "properties": { "configValueType": { "$ref": "#/components/schemas/ConfigValueType" }, "value": { "type": "string", "nullable": true } }, "additionalProperties": false }, "CustomReference": { "type": "object", "properties": { "amlDataStoreName": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DBFSReference": { "type": "object", "properties": { "relativePath": { "type": "string", "nullable": true }, "amlDataStoreName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "Data": { "type": "object", "properties": { "dataLocation": { "$ref": 
"#/components/schemas/ExecutionDataLocation" }, "mechanism": { "$ref": "#/components/schemas/DeliveryMechanism" }, "environmentVariableName": { "type": "string", "nullable": true }, "pathOnCompute": { "type": "string", "nullable": true }, "overwrite": { "type": "boolean" }, "options": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true } }, "additionalProperties": false }, "DataBindingMode": { "enum": [ "Mount", "Download", "Upload", "ReadOnlyMount", "ReadWriteMount", "Direct", "EvalMount", "EvalDownload" ], "type": "string" }, "DataCategory": { "enum": [ "All", "Dataset", "Model" ], "type": "string" }, "DataCopyMode": { "enum": [ "MergeWithOverwrite", "FailIfConflict" ], "type": "string" }, "DataInfo": { "type": "object", "properties": { "feedName": { "type": "string", "nullable": true }, "id": { "type": "string", "nullable": true }, "dataSourceType": { "$ref": "#/components/schemas/DataSourceType" }, "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "dataTypeId": { "type": "string", "nullable": true }, "amlDataStoreName": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time", "nullable": true }, "modifiedDate": { "type": "string", "format": "date-time", "nullable": true }, "registeredBy": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "createdByStudio": { "type": "boolean", "nullable": true }, "dataReferenceType": { "$ref": "#/components/schemas/DataReferenceType" }, "datasetType": { "type": "string", "nullable": true }, "savedDatasetId": { "type": "string", "nullable": true }, "datasetVersionId": { "type": "string", "nullable": true }, "isVisible": { "type": "boolean" }, "isRegistered": { "type": "boolean" }, "properties": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary", "nullable": true }, "connectionString": { "type": "string", "nullable": true }, "containerName": { "type": "string", "nullable": true }, "dataStorageEndpointUri": { "type": "string", "format": "uri", "nullable": true }, "workspaceSaiToken": { "type": "string", "nullable": true }, "amlDatasetDataFlow": { "type": "string", "nullable": true }, "systemData": { "$ref": "#/components/schemas/SystemData" }, "armId": { "type": "string", "nullable": true }, "assetId": { "type": "string", "nullable": true }, "assetUri": { "type": "string", "nullable": true }, "assetType": { "type": "string", "nullable": true }, "isDataV2": { "type": "boolean", "nullable": true }, "assetScopeType": { "$ref": "#/components/schemas/AssetScopeTypes" }, "pipelineRunId": { "type": "string", "nullable": true }, "moduleNodeId": { "type": "string", "nullable": true }, "outputPortName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DataLocation": { "type": "object", "properties": { "storageType": { "$ref": "#/components/schemas/DataLocationStorageType" }, "storageId": { "type": "string", "nullable": true }, "uri": { "type": "string", "nullable": true }, "dataStoreName": { "type": "string", "nullable": true }, "dataReference": { "$ref": "#/components/schemas/DataReference" }, "amlDataset": { "$ref": "#/components/schemas/AmlDataset" }, "assetDefinition": { "$ref": "#/components/schemas/AssetDefinition" } }, "additionalProperties": false }, "DataLocationStorageType": { "enum": [ "None", 
"AzureBlob", "Artifact", "Snapshot", "SavedAmlDataset", "Asset" ], "type": "string" }, "DataPath": { "type": "object", "properties": { "dataStoreName": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true }, "sqlDataPath": { "$ref": "#/components/schemas/SqlDataPath" } }, "additionalProperties": false }, "DataPathParameter": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "documentation": { "type": "string", "nullable": true }, "defaultValue": { "$ref": "#/components/schemas/LegacyDataPath" }, "isOptional": { "type": "boolean" }, "dataTypeId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DataPortDto": { "type": "object", "properties": { "dataPortType": { "$ref": "#/components/schemas/DataPortType" }, "dataPortName": { "type": "string", "nullable": true }, "dataStoreName": { "type": "string", "nullable": true }, "dataStoreIntellectualPropertyAccessMode": { "$ref": "#/components/schemas/IntellectualPropertyAccessMode" }, "dataStoreIntellectualPropertyPublisher": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DataPortType": { "enum": [ "Input", "Output" ], "type": "string" }, "DataReference": { "type": "object", "properties": { "type": { "$ref": "#/components/schemas/DataReferenceType" }, "azureBlobReference": { "$ref": "#/components/schemas/AzureBlobReference" }, "azureDataLakeReference": { "$ref": "#/components/schemas/AzureDataLakeReference" }, "azureFilesReference": { "$ref": "#/components/schemas/AzureFilesReference" }, "azureSqlDatabaseReference": { "$ref": "#/components/schemas/AzureDatabaseReference" }, "azurePostgresDatabaseReference": { "$ref": "#/components/schemas/AzureDatabaseReference" }, "azureDataLakeGen2Reference": { "$ref": "#/components/schemas/AzureDataLakeGen2Reference" }, "dbfsReference": { "$ref": "#/components/schemas/DBFSReference" }, "azureMySqlDatabaseReference": { "$ref": "#/components/schemas/AzureDatabaseReference" }, "customReference": { "$ref": "#/components/schemas/CustomReference" }, "hdfsReference": { "$ref": "#/components/schemas/HdfsReference" } }, "additionalProperties": false }, "DataReferenceConfiguration": { "type": "object", "properties": { "dataStoreName": { "type": "string", "nullable": true }, "mode": { "$ref": "#/components/schemas/DataStoreMode" }, "pathOnDataStore": { "type": "string", "nullable": true }, "pathOnCompute": { "type": "string", "nullable": true }, "overwrite": { "type": "boolean" } }, "additionalProperties": false }, "DataReferenceType": { "enum": [ "None", "AzureBlob", "AzureDataLake", "AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase", "Custom", "Hdfs" ], "type": "string" }, "DataSetDefinition": { "type": "object", "properties": { "dataTypeShortName": { "type": "string", "nullable": true }, "parameterName": { "type": "string", "nullable": true }, "value": { "$ref": "#/components/schemas/DataSetDefinitionValue" } }, "additionalProperties": false }, "DataSetDefinitionValue": { "type": "object", "properties": { "literalValue": { "$ref": "#/components/schemas/DataPath" }, "dataSetReference": { "$ref": "#/components/schemas/RegisteredDataSetReference" }, "savedDataSetReference": { "$ref": "#/components/schemas/SavedDataSetReference" }, "assetDefinition": { "$ref": "#/components/schemas/AssetDefinition" } }, "additionalProperties": false }, "DataSetPathParameter": { "type": "object", "properties": { "name": { "type": "string", "nullable": true 
}, "documentation": { "type": "string", "nullable": true }, "defaultValue": { "$ref": "#/components/schemas/DataSetDefinitionValue" }, "isOptional": { "type": "boolean" } }, "additionalProperties": false }, "DataSettings": { "type": "object", "properties": { "targetColumnName": { "type": "string", "nullable": true }, "weightColumnName": { "type": "string", "nullable": true }, "positiveLabel": { "type": "string", "nullable": true }, "validationData": { "$ref": "#/components/schemas/ValidationDataSettings" }, "testData": { "$ref": "#/components/schemas/TestDataSettings" } }, "additionalProperties": false }, "DataSourceType": { "enum": [ "None", "PipelineDataSource", "AmlDataset", "GlobalDataset", "FeedModel", "FeedDataset", "AmlDataVersion", "AMLModelVersion" ], "type": "string" }, "DataStoreMode": { "enum": [ "Mount", "Download", "Upload" ], "type": "string" }, "DataTransferCloudConfiguration": { "type": "object", "properties": { "AllowOverwrite": { "type": "boolean", "nullable": true } }, "additionalProperties": false }, "DataTransferSink": { "type": "object", "properties": { "type": { "$ref": "#/components/schemas/DataTransferStorageType" }, "fileSystem": { "$ref": "#/components/schemas/FileSystem" }, "databaseSink": { "$ref": "#/components/schemas/DatabaseSink" } }, "additionalProperties": false }, "DataTransferSource": { "type": "object", "properties": { "type": { "$ref": "#/components/schemas/DataTransferStorageType" }, "fileSystem": { "$ref": "#/components/schemas/FileSystem" }, "databaseSource": { "$ref": "#/components/schemas/DatabaseSource" } }, "additionalProperties": false }, "DataTransferStorageType": { "enum": [ "DataBase", "FileSystem" ], "type": "string" }, "DataTransferTaskType": { "enum": [ "ImportData", "ExportData", "CopyData" ], "type": "string" }, "DataTransferV2CloudSetting": { "type": "object", "properties": { "taskType": { "$ref": "#/components/schemas/DataTransferTaskType" }, "ComputeName": { "type": "string", "nullable": true }, "CopyDataTask": { "$ref": "#/components/schemas/CopyDataTask" }, "ImportDataTask": { "$ref": "#/components/schemas/ImportDataTask" }, "ExportDataTask": { "$ref": "#/components/schemas/ExportDataTask" }, "DataTransferSources": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/DataTransferSource" }, "description": "This is a dictionary", "nullable": true }, "DataTransferSinks": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/DataTransferSink" }, "description": "This is a dictionary", "nullable": true }, "DataCopyMode": { "$ref": "#/components/schemas/DataCopyMode" } }, "additionalProperties": false }, "DataTypeCreationInfo": { "type": "object", "properties": { "id": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "isDirectory": { "type": "boolean" }, "fileExtension": { "type": "string", "nullable": true }, "parentDataTypeIds": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "DataTypeMechanism": { "enum": [ "ErrorWhenNotExisting", "RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly" ], "type": "string" }, "DatabaseSink": { "type": "object", "properties": { "connection": { "type": "string", "nullable": true }, "table": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DatabaseSource": { "type": "object", "properties": { "connection": { "type": "string", "nullable": true }, "query": { "type": "string", "nullable": 
true }, "storedProcedureName": { "type": "string", "nullable": true }, "storedProcedureParameters": { "type": "array", "items": { "$ref": "#/components/schemas/StoredProcedureParameter" }, "nullable": true } }, "additionalProperties": false }, "DatabricksComputeInfo": { "type": "object", "properties": { "existingClusterId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DatabricksConfiguration": { "type": "object", "properties": { "workers": { "type": "integer", "format": "int32" }, "minimumWorkerCount": { "type": "integer", "format": "int32" }, "maxMumWorkerCount": { "type": "integer", "format": "int32" }, "sparkVersion": { "type": "string", "nullable": true }, "nodeTypeId": { "type": "string", "nullable": true }, "sparkConf": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "sparkEnvVars": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "clusterLogConfDbfsPath": { "type": "string", "nullable": true }, "dbfsInitScripts": { "type": "array", "items": { "$ref": "#/components/schemas/InitScriptInfoDto" }, "nullable": true }, "instancePoolId": { "type": "string", "nullable": true }, "timeoutSeconds": { "type": "integer", "format": "int32" }, "notebookTask": { "$ref": "#/components/schemas/NoteBookTaskDto" }, "sparkPythonTask": { "$ref": "#/components/schemas/SparkPythonTaskDto" }, "sparkJarTask": { "$ref": "#/components/schemas/SparkJarTaskDto" }, "sparkSubmitTask": { "$ref": "#/components/schemas/SparkSubmitTaskDto" }, "jarLibraries": { "type": "array", "items": { "type": "string" }, "nullable": true }, "eggLibraries": { "type": "array", "items": { "type": "string" }, "nullable": true }, "whlLibraries": { "type": "array", "items": { "type": "string" }, "nullable": true }, "pypiLibraries": { "type": "array", "items": { "$ref": "#/components/schemas/PythonPyPiOrRCranLibraryDto" }, "nullable": true }, "rCranLibraries": { "type": "array", "items": { "$ref": "#/components/schemas/PythonPyPiOrRCranLibraryDto" }, "nullable": true }, "mavenLibraries": { "type": "array", "items": { "$ref": "#/components/schemas/MavenLibraryDto" }, "nullable": true }, "libraries": { "type": "array", "items": { }, "nullable": true }, "linkedADBWorkspaceMetadata": { "$ref": "#/components/schemas/LinkedADBWorkspaceMetadata" }, "databrickResourceId": { "type": "string", "nullable": true }, "autoScale": { "type": "boolean" } }, "additionalProperties": false }, "DatacacheConfiguration": { "type": "object", "properties": { "datacacheId": { "type": "string", "format": "uuid" }, "datacacheStore": { "type": "string", "nullable": true }, "datasetId": { "type": "string", "format": "uuid" }, "mode": { "$ref": "#/components/schemas/DatacacheMode" }, "replica": { "type": "integer", "format": "int32", "nullable": true }, "failureFallback": { "type": "boolean" }, "pathOnCompute": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DatacacheMode": { "enum": [ "Mount" ], "type": "string" }, "DatasetAccessModes": { "enum": [ "Default", "DatasetInDpv2", "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset", "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset" ], "type": "string" }, "DatasetConsumptionType": { "enum": [ "RunInput", "Reference" ], "type": "string" }, "DatasetDeliveryMechanism": { "enum": [ "Direct", "Mount", "Download", "Hdfs" ], 
"type": "string" }, "DatasetIdentifier": { "type": "object", "properties": { "savedId": { "type": "string", "nullable": true }, "registeredId": { "type": "string", "nullable": true }, "registeredVersion": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DatasetInputDetails": { "type": "object", "properties": { "inputName": { "type": "string", "nullable": true }, "mechanism": { "$ref": "#/components/schemas/DatasetDeliveryMechanism" }, "pathOnCompute": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DatasetLineage": { "type": "object", "properties": { "identifier": { "$ref": "#/components/schemas/DatasetIdentifier" }, "consumptionType": { "$ref": "#/components/schemas/DatasetConsumptionType" }, "inputDetails": { "$ref": "#/components/schemas/DatasetInputDetails" } }, "additionalProperties": false }, "DatasetOutput": { "type": "object", "properties": { "datasetType": { "$ref": "#/components/schemas/DatasetType" }, "datasetRegistration": { "$ref": "#/components/schemas/DatasetRegistration" }, "datasetOutputOptions": { "$ref": "#/components/schemas/DatasetOutputOptions" } }, "additionalProperties": false }, "DatasetOutputDetails": { "type": "object", "properties": { "outputName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DatasetOutputOptions": { "type": "object", "properties": { "sourceGlobs": { "$ref": "#/components/schemas/GlobsOptions" }, "pathOnDatastore": { "type": "string", "nullable": true }, "PathOnDatastoreParameterAssignment": { "$ref": "#/components/schemas/ParameterAssignment" } }, "additionalProperties": false }, "DatasetOutputType": { "enum": [ "RunOutput", "Reference" ], "type": "string" }, "DatasetRegistration": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "createNewVersion": { "type": "boolean" }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "additionalTransformations": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DatasetRegistrationOptions": { "type": "object", "properties": { "additionalTransformation": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DatasetType": { "enum": [ "File", "Tabular" ], "type": "string" }, "DatastoreSetting": { "type": "object", "properties": { "dataStoreName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DbfsStorageInfoDto": { "type": "object", "properties": { "destination": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DebugInfoResponse": { "type": "object", "properties": { "type": { "type": "string", "description": "The type.", "nullable": true }, "message": { "type": "string", "description": "The message.", "nullable": true }, "stackTrace": { "type": "string", "description": "The stack trace.", "nullable": true }, "innerException": { "$ref": "#/components/schemas/DebugInfoResponse" }, "data": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary", "nullable": true }, "errorResponse": { "$ref": "#/components/schemas/ErrorResponse" } }, "additionalProperties": false, "description": "Internal debugging information not intended for external clients." 
}, "DeliveryMechanism": { "enum": [ "Direct", "Mount", "Download", "Hdfs" ], "type": "string" }, "DeployFlowRequest": { "type": "object", "properties": { "sourceResourceId": { "type": "string", "nullable": true }, "sourceFlowRunId": { "type": "string", "nullable": true }, "sourceFlowId": { "type": "string", "nullable": true }, "flow": { "$ref": "#/components/schemas/Flow" }, "flowType": { "$ref": "#/components/schemas/FlowType" }, "flowSubmitRunSettings": { "$ref": "#/components/schemas/FlowSubmitRunSettings" }, "outputNamesIncludedInEndpointResponse": { "type": "array", "items": { "type": "string" }, "nullable": true }, "endpointName": { "type": "string", "nullable": true }, "endpointDescription": { "type": "string", "nullable": true }, "authMode": { "$ref": "#/components/schemas/EndpointAuthMode" }, "identity": { "$ref": "#/components/schemas/ManagedServiceIdentity" }, "endpointTags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "connectionOverrides": { "type": "array", "items": { "$ref": "#/components/schemas/ConnectionOverrideSetting" }, "nullable": true }, "useWorkspaceConnection": { "type": "boolean" }, "deploymentName": { "type": "string", "nullable": true }, "environment": { "type": "string", "nullable": true }, "environmentVariables": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "deploymentTags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "appInsightsEnabled": { "type": "boolean" }, "enableModelDataCollector": { "type": "boolean" }, "skipUpdateTrafficToFull": { "type": "boolean" }, "enableStreamingResponse": { "type": "boolean" }, "useFlowSnapshotToDeploy": { "type": "boolean" }, "instanceType": { "type": "string", "nullable": true }, "instanceCount": { "type": "integer", "format": "int32" }, "autoGrantConnectionPermission": { "type": "boolean" } }, "additionalProperties": false }, "DeploymentInfo": { "type": "object", "properties": { "operationId": { "type": "string", "nullable": true }, "serviceId": { "type": "string", "nullable": true }, "serviceName": { "type": "string", "nullable": true }, "statusDetail": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DistributionConfiguration": { "type": "object", "properties": { "distributionType": { "$ref": "#/components/schemas/DistributionType" } }, "additionalProperties": false }, "DistributionParameter": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "label": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "inputType": { "$ref": "#/components/schemas/DistributionParameterEnum" } }, "additionalProperties": false }, "DistributionParameterEnum": { "enum": [ "Text", "Number" ], "type": "string" }, "DistributionType": { "enum": [ "PyTorch", "TensorFlow", "Mpi", "Ray" ], "type": "string" }, "DoWhileControlFlowInfo": { "type": "object", "properties": { "outputPortNameToInputPortNamesMapping": { "type": "object", "additionalProperties": { "type": "array", "items": { "type": "string" }, "nullable": true }, "nullable": true }, "conditionOutputPortName": { "type": "string", "nullable": true }, "runSettings": { "$ref": "#/components/schemas/DoWhileControlFlowRunSettings" } }, "additionalProperties": false }, "DoWhileControlFlowRunSettings": { "type": "object", "properties": { 
"maxLoopIterationCount": { "$ref": "#/components/schemas/ParameterAssignment" } }, "additionalProperties": false }, "DockerBuildContext": { "type": "object", "properties": { "locationType": { "$ref": "#/components/schemas/BuildContextLocationType" }, "location": { "type": "string", "nullable": true }, "dockerfilePath": { "type": "string", "default": "Dockerfile", "nullable": true } }, "additionalProperties": false }, "DockerConfiguration": { "type": "object", "properties": { "useDocker": { "type": "boolean", "nullable": true }, "sharedVolumes": { "type": "boolean", "nullable": true }, "arguments": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "DockerImagePlatform": { "type": "object", "properties": { "os": { "type": "string", "nullable": true }, "architecture": { "type": "string", "nullable": true } }, "additionalProperties": false }, "DockerSection": { "type": "object", "properties": { "baseImage": { "type": "string", "nullable": true }, "platform": { "$ref": "#/components/schemas/DockerImagePlatform" }, "baseDockerfile": { "type": "string", "nullable": true }, "buildContext": { "$ref": "#/components/schemas/DockerBuildContext" }, "baseImageRegistry": { "$ref": "#/components/schemas/ContainerRegistry" } }, "additionalProperties": false }, "DockerSettingConfiguration": { "type": "object", "properties": { "useDocker": { "type": "boolean", "nullable": true }, "sharedVolumes": { "type": "boolean", "nullable": true }, "shmSize": { "type": "string", "nullable": true }, "arguments": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "DownloadResourceInfo": { "type": "object", "properties": { "downloadUrl": { "type": "string", "nullable": true }, "size": { "type": "integer", "format": "int64" } }, "additionalProperties": false }, "EPRPipelineRunErrorClassificationRequest": { "type": "object", "properties": { "rootRunId": { "type": "string", "nullable": true }, "runId": { "type": "string", "nullable": true }, "taskResult": { "type": "string", "nullable": true }, "failureType": { "type": "string", "nullable": true }, "failureName": { "type": "string", "nullable": true }, "responsibleTeam": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ETag": { "type": "object", "additionalProperties": false }, "EarlyTerminationPolicyType": { "enum": [ "Bandit", "MedianStopping", "TruncationSelection" ], "type": "string" }, "EmailNotificationEnableType": { "enum": [ "JobCompleted", "JobFailed", "JobCancelled" ], "type": "string" }, "EndpointAuthMode": { "enum": [ "AMLToken", "Key", "AADToken" ], "type": "string" }, "EndpointSetting": { "type": "object", "properties": { "type": { "type": "string", "nullable": true }, "port": { "type": "integer", "format": "int32", "nullable": true }, "sslThumbprint": { "type": "string", "nullable": true }, "endpoint": { "type": "string", "nullable": true }, "proxyEndpoint": { "type": "string", "nullable": true }, "status": { "type": "string", "nullable": true }, "errorMessage": { "type": "string", "nullable": true }, "enabled": { "type": "boolean", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "nodes": { "type": "string", "nullable": true } }, "additionalProperties": false }, "EntityInterface": { "type": "object", "properties": { "parameters": { "type": "array", "items": { "$ref": "#/components/schemas/Parameter" }, "nullable": true }, "ports": { 
"$ref": "#/components/schemas/NodePortInterface" }, "metadataParameters": { "type": "array", "items": { "$ref": "#/components/schemas/Parameter" }, "nullable": true }, "dataPathParameters": { "type": "array", "items": { "$ref": "#/components/schemas/DataPathParameter" }, "nullable": true }, "dataPathParameterList": { "type": "array", "items": { "$ref": "#/components/schemas/DataSetPathParameter" }, "nullable": true }, "AssetOutputSettingsParameterList": { "type": "array", "items": { "$ref": "#/components/schemas/AssetOutputSettingsParameter" }, "nullable": true } }, "additionalProperties": false }, "EntityKind": { "enum": [ "Invalid", "LineageRoot", "Versioned", "Unversioned" ], "type": "string" }, "EntityStatus": { "enum": [ "Active", "Deprecated", "Disabled" ], "type": "string" }, "EntrySetting": { "type": "object", "properties": { "file": { "type": "string", "nullable": true }, "className": { "type": "string", "nullable": true } }, "additionalProperties": false }, "EnumParameterRule": { "type": "object", "properties": { "validValues": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "EnvironmentConfiguration": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true }, "useEnvironmentDefinition": { "type": "boolean" }, "environmentDefinitionString": { "type": "string", "nullable": true } }, "additionalProperties": false }, "EnvironmentDefinition": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true }, "assetId": { "type": "string", "nullable": true }, "autoRebuild": { "type": "boolean", "nullable": true }, "python": { "$ref": "#/components/schemas/PythonSection" }, "environmentVariables": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "docker": { "$ref": "#/components/schemas/DockerSection" }, "spark": { "$ref": "#/components/schemas/SparkSection" }, "r": { "$ref": "#/components/schemas/RSection" }, "inferencingStackVersion": { "type": "string", "nullable": true } }, "additionalProperties": false }, "EnvironmentDefinitionDto": { "type": "object", "properties": { "environmentName": { "type": "string", "nullable": true }, "environmentVersion": { "type": "string", "nullable": true }, "intellectualPropertyPublisher": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ErrorAdditionalInfo": { "type": "object", "properties": { "type": { "type": "string", "description": "The additional info type.", "nullable": true }, "info": { "description": "The additional info.", "nullable": true } }, "additionalProperties": false, "description": "The resource management error additional info." 
}, "ErrorHandlingMode": { "enum": [ "DefaultInterpolation", "CustomerFacingInterpolation" ], "type": "string" }, "ErrorResponse": { "type": "object", "properties": { "error": { "$ref": "#/components/schemas/RootError" }, "correlation": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "description": "Dictionary containing correlation details for the error.", "nullable": true }, "environment": { "type": "string", "description": "The hosting environment.", "nullable": true }, "location": { "type": "string", "description": "The Azure region.", "nullable": true }, "time": { "type": "string", "description": "The time in UTC.", "format": "date-time" }, "componentName": { "type": "string", "description": "Component name where error originated/encountered.", "nullable": true } }, "description": "The error response." }, "EsCloudConfiguration": { "type": "object", "properties": { "enableOutputToFileBasedOnDataTypeId": { "type": "boolean", "nullable": true }, "environment": { "$ref": "#/components/schemas/EnvironmentConfiguration" }, "hyperDriveConfiguration": { "$ref": "#/components/schemas/HyperDriveConfiguration" }, "k8sConfig": { "$ref": "#/components/schemas/K8sConfiguration" }, "resourceConfig": { "$ref": "#/components/schemas/AEVAResourceConfiguration" }, "torchDistributedConfig": { "$ref": "#/components/schemas/TorchDistributedConfiguration" }, "targetSelectorConfig": { "$ref": "#/components/schemas/TargetSelectorConfiguration" }, "dockerConfig": { "$ref": "#/components/schemas/DockerSettingConfiguration" }, "environmentVariables": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "maxRunDurationSeconds": { "type": "integer", "format": "int32", "nullable": true }, "identity": { "$ref": "#/components/schemas/IdentitySetting" }, "applicationEndpoints": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ApplicationEndpointConfiguration" }, "nullable": true }, "runConfig": { "type": "string", "nullable": true } }, "additionalProperties": false }, "EvaluationFlowRunSettings": { "type": "object", "properties": { "flowRunId": { "type": "string", "nullable": true }, "flowRunDisplayName": { "type": "string", "nullable": true }, "batchDataInput": { "$ref": "#/components/schemas/BatchDataInput" }, "inputsMapping": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "dataInputs": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "connectionOverrides": { "type": "array", "items": { "$ref": "#/components/schemas/ConnectionOverrideSetting" }, "nullable": true }, "runtimeName": { "type": "string", "nullable": true }, "amlComputeName": { "type": "string", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "ExampleRequest": { "type": "object", "properties": { "inputs": { "type": "object", "additionalProperties": { "type": "array", "items": { "type": "array", "items": { } } }, "description": "This is a dictionary", "nullable": true }, "globalParameters": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "ExecutionContextDto": { "type": "object", "properties": { "executable": { "type": "string", "nullable": true }, 
"userCode": { "type": "string", "nullable": true }, "arguments": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ExecutionDataLocation": { "type": "object", "properties": { "dataset": { "$ref": "#/components/schemas/RunDatasetReference" }, "dataPath": { "$ref": "#/components/schemas/ExecutionDataPath" }, "uri": { "$ref": "#/components/schemas/UriReference" }, "type": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ExecutionDataPath": { "type": "object", "properties": { "datastoreName": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ExecutionGlobsOptions": { "type": "object", "properties": { "globPatterns": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "ExecutionPhase": { "enum": [ "Execution", "Initialization", "Finalization" ], "type": "string" }, "ExperimentComputeMetaInfo": { "type": "object", "properties": { "currentNodeCount": { "type": "integer", "format": "int32" }, "targetNodeCount": { "type": "integer", "format": "int32" }, "maxNodeCount": { "type": "integer", "format": "int32" }, "minNodeCount": { "type": "integer", "format": "int32" }, "idleNodeCount": { "type": "integer", "format": "int32" }, "runningNodeCount": { "type": "integer", "format": "int32" }, "preparingNodeCount": { "type": "integer", "format": "int32" }, "unusableNodeCount": { "type": "integer", "format": "int32" }, "leavingNodeCount": { "type": "integer", "format": "int32" }, "preemptedNodeCount": { "type": "integer", "format": "int32" }, "vmSize": { "type": "string", "nullable": true }, "location": { "type": "string", "nullable": true }, "provisioningState": { "type": "string", "nullable": true }, "state": { "type": "string", "nullable": true }, "osType": { "type": "string", "nullable": true }, "id": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "createdByStudio": { "type": "boolean" }, "isGpuType": { "type": "boolean" }, "resourceId": { "type": "string", "nullable": true }, "computeType": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ExperimentInfo": { "type": "object", "properties": { "experimentName": { "type": "string", "nullable": true }, "experimentId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ExportComponentMetaInfo": { "type": "object", "properties": { "moduleEntity": { "$ref": "#/components/schemas/ModuleEntity" }, "moduleVersion": { "type": "string", "nullable": true }, "isAnonymous": { "type": "boolean", "nullable": true } }, "additionalProperties": false }, "ExportDataTask": { "type": "object", "properties": { "DataTransferSink": { "$ref": "#/components/schemas/DataTransferSink" } }, "additionalProperties": false }, "ExtensibleObject": { "type": "object" }, "FeaturizationMode": { "enum": [ "Auto", "Custom", "Off" ], "type": "string" }, "FeaturizationSettings": { "type": "object", "properties": { "mode": { "$ref": "#/components/schemas/FeaturizationMode" }, "blockedTransformers": { "type": "array", "items": { "type": "string" }, "nullable": true }, "columnPurposes": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "dropColumns": { "type": "array", "items": { "type": "string" }, "nullable": true }, "transformerParams": { "type": "object", "additionalProperties": { "type": "array", "items": { "$ref": "#/components/schemas/ColumnTransformer" }, 
"nullable": true }, "nullable": true }, "datasetLanguage": { "type": "string", "nullable": true }, "enableDnnFeaturization": { "type": "boolean", "nullable": true } }, "additionalProperties": false }, "FeedDto": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "displayName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "sharingScopes": { "type": "array", "items": { "$ref": "#/components/schemas/SharingScope" }, "nullable": true }, "supportedAssetTypes": { "type": "object", "properties": { "Component": { "$ref": "#/components/schemas/AssetTypeMetaInfo" }, "Model": { "$ref": "#/components/schemas/AssetTypeMetaInfo" }, "Environment": { "$ref": "#/components/schemas/AssetTypeMetaInfo" }, "Dataset": { "$ref": "#/components/schemas/AssetTypeMetaInfo" }, "DataStore": { "$ref": "#/components/schemas/AssetTypeMetaInfo" }, "SampleGraph": { "$ref": "#/components/schemas/AssetTypeMetaInfo" }, "FlowTool": { "$ref": "#/components/schemas/AssetTypeMetaInfo" }, "FlowToolSetting": { "$ref": "#/components/schemas/AssetTypeMetaInfo" }, "FlowConnection": { "$ref": "#/components/schemas/AssetTypeMetaInfo" }, "FlowSample": { "$ref": "#/components/schemas/AssetTypeMetaInfo" }, "FlowRuntimeSpec": { "$ref": "#/components/schemas/AssetTypeMetaInfo" } }, "additionalProperties": false, "nullable": true }, "regionalWorkspaceStorage": { "type": "object", "additionalProperties": { "type": "array", "items": { "type": "string" } }, "description": "This is a dictionary", "nullable": true }, "intellectualPropertyPublisher": { "type": "string", "nullable": true } }, "additionalProperties": false }, "FileSystem": { "type": "object", "properties": { "connection": { "type": "string", "nullable": true }, "path": { "type": "string", "nullable": true } }, "additionalProperties": false }, "Flow": { "type": "object", "properties": { "sourceResourceId": { "type": "string", "nullable": true }, "flowGraph": { "$ref": "#/components/schemas/FlowGraph" }, "nodeVariants": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/NodeVariant" }, "description": "This is a dictionary", "nullable": true }, "flowGraphLayout": { "$ref": "#/components/schemas/FlowGraphLayout" }, "bulkTestData": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "evaluationFlows": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowGraphReference" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "FlowAnnotations": { "type": "object", "properties": { "flowName": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" }, "owner": { "$ref": "#/components/schemas/SchemaContractsCreatedBy" }, "isArchived": { "type": "boolean" }, "vmSize": { "type": "string", "nullable": true }, "maxIdleTimeSeconds": { "type": "integer", "format": "int64", "nullable": true }, "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "archived": { "type": "boolean" }, "tags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true } } }, "FlowBaseDto": { "type": "object", "properties": { "flowId": { "type": "string", "nullable": true }, "flowName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, 
"tags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "flowType": { "$ref": "#/components/schemas/FlowType" }, "experimentId": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" }, "owner": { "$ref": "#/components/schemas/SchemaContractsCreatedBy" }, "flowResourceId": { "type": "string", "nullable": true }, "isArchived": { "type": "boolean" }, "flowDefinitionFilePath": { "type": "string", "nullable": true }, "vmSize": { "type": "string", "nullable": true }, "maxIdleTimeSeconds": { "type": "integer", "format": "int64", "nullable": true }, "identity": { "type": "string", "nullable": true } }, "additionalProperties": false }, "FlowDto": { "type": "object", "properties": { "timestamp": { "type": "string", "format": "date-time", "nullable": true }, "eTag": { "$ref": "#/components/schemas/ETag" }, "flow": { "$ref": "#/components/schemas/Flow" }, "flowRunSettings": { "$ref": "#/components/schemas/FlowRunSettings" }, "flowRunResult": { "$ref": "#/components/schemas/FlowRunResult" }, "flowTestMode": { "$ref": "#/components/schemas/FlowTestMode" }, "flowTestInfos": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowTestInfo" }, "nullable": true }, "studioPortalEndpoint": { "type": "string", "nullable": true }, "flowId": { "type": "string", "nullable": true }, "flowName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "flowType": { "$ref": "#/components/schemas/FlowType" }, "experimentId": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" }, "owner": { "$ref": "#/components/schemas/SchemaContractsCreatedBy" }, "flowResourceId": { "type": "string", "nullable": true }, "isArchived": { "type": "boolean" }, "flowDefinitionFilePath": { "type": "string", "nullable": true }, "vmSize": { "type": "string", "nullable": true }, "maxIdleTimeSeconds": { "type": "integer", "format": "int64", "nullable": true }, "identity": { "type": "string", "nullable": true } }, "additionalProperties": false }, "FlowEnvironment": { "type": "object", "properties": { "image": { "type": "string", "nullable": true }, "python_requirements_txt": { "type": "string", "nullable": true } }, "additionalProperties": false }, "FlowFeature": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "state": { "type": "object", "properties": { "Runtime": { "$ref": "#/components/schemas/FlowFeatureStateEnum" }, "Executor": { "$ref": "#/components/schemas/FlowFeatureStateEnum" }, "PFS": { "$ref": "#/components/schemas/FlowFeatureStateEnum" } }, "additionalProperties": false, "nullable": true } }, "additionalProperties": false }, "FlowFeatureStateEnum": { "enum": [ "Ready", "E2ETest" ], "type": "string" }, "FlowGraph": { "type": "object", "properties": { "nodes": { "type": "array", "items": { "$ref": "#/components/schemas/Node" }, "nullable": true }, "tools": { "type": "array", "items": { "$ref": "#/components/schemas/Tool" }, "nullable": true }, "codes": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "inputs": { "type": 
"object", "additionalProperties": { "$ref": "#/components/schemas/FlowInputDefinition" }, "description": "This is a dictionary", "nullable": true }, "outputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowOutputDefinition" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "FlowGraphAnnotationNode": { "type": "object", "properties": { "id": { "type": "string", "nullable": true }, "content": { "type": "string", "nullable": true }, "mentionedNodeNames": { "type": "array", "items": { "type": "string" }, "nullable": true }, "structuredContent": { "type": "string", "nullable": true } }, "additionalProperties": false }, "FlowGraphLayout": { "type": "object", "properties": { "nodeLayouts": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowNodeLayout" }, "description": "This is a dictionary", "nullable": true }, "extendedData": { "type": "string", "nullable": true }, "annotationNodes": { "type": "array", "items": { "$ref": "#/components/schemas/FlowGraphAnnotationNode" }, "nullable": true }, "orientation": { "$ref": "#/components/schemas/Orientation" } }, "additionalProperties": false }, "FlowGraphReference": { "type": "object", "properties": { "flowGraph": { "$ref": "#/components/schemas/FlowGraph" }, "referenceResourceId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "FlowIndexEntity": { "type": "object", "properties": { "schemaId": { "type": "string", "nullable": true }, "entityId": { "type": "string", "nullable": true }, "kind": { "$ref": "#/components/schemas/EntityKind" }, "annotations": { "$ref": "#/components/schemas/FlowAnnotations" }, "properties": { "$ref": "#/components/schemas/FlowProperties" }, "internal": { "$ref": "#/components/schemas/ExtensibleObject" }, "updateSequence": { "type": "integer", "format": "int64" }, "type": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true, "readOnly": true }, "entityContainerId": { "type": "string", "nullable": true, "readOnly": true }, "entityObjectId": { "type": "string", "nullable": true, "readOnly": true }, "resourceType": { "type": "string", "nullable": true, "readOnly": true }, "relationships": { "type": "array", "items": { "$ref": "#/components/schemas/Relationship" }, "nullable": true }, "assetId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "FlowInputDefinition": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "type": { "$ref": "#/components/schemas/ValueType" }, "default": { "nullable": true }, "description": { "type": "string", "nullable": true }, "is_chat_input": { "type": "boolean" }, "is_chat_history": { "type": "boolean", "nullable": true } }, "additionalProperties": false }, "FlowLanguage": { "enum": [ "Python", "CSharp" ], "type": "string" }, "FlowNode": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "type": { "$ref": "#/components/schemas/ToolType" }, "source": { "$ref": "#/components/schemas/NodeSource" }, "inputs": { "type": "object", "additionalProperties": { "nullable": true }, "nullable": true }, "use_variants": { "type": "boolean" }, "activate": { "$ref": "#/components/schemas/Activate" }, "comment": { "type": "string", "nullable": true }, "api": { "type": "string", "nullable": true }, "provider": { "type": "string", "nullable": true }, "connection": { "type": "string", "nullable": true }, "module": { "type": "string", "nullable": true }, 
"aggregation": { "type": "boolean" } }, "additionalProperties": false }, "FlowNodeLayout": { "type": "object", "properties": { "x": { "type": "number", "format": "float" }, "y": { "type": "number", "format": "float" }, "width": { "type": "number", "format": "float" }, "height": { "type": "number", "format": "float" }, "index": { "type": "integer", "format": "int32" }, "extendedData": { "type": "string", "nullable": true } }, "additionalProperties": false }, "FlowNodeVariant": { "type": "object", "properties": { "default_variant_id": { "type": "string", "nullable": true }, "variants": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowVariantNode" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "FlowOutputDefinition": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "type": { "$ref": "#/components/schemas/ValueType" }, "description": { "type": "string", "nullable": true }, "reference": { "type": "string", "nullable": true }, "evaluation_only": { "type": "boolean" }, "is_chat_output": { "type": "boolean" } }, "additionalProperties": false }, "FlowPatchOperationType": { "enum": [ "ArchiveFlow", "RestoreFlow", "ExportFlowToFile" ], "type": "string" }, "FlowProperties": { "type": "object", "properties": { "flowId": { "type": "string", "nullable": true }, "experimentId": { "type": "string", "nullable": true }, "flowType": { "$ref": "#/components/schemas/FlowType" }, "flowDefinitionFilePath": { "type": "string", "nullable": true }, "creationContext": { "$ref": "#/components/schemas/CreationContext" } } }, "FlowRunBasePath": { "type": "object", "properties": { "outputDatastoreName": { "type": "string", "nullable": true }, "basePath": { "type": "string", "nullable": true } }, "additionalProperties": false }, "FlowRunInfo": { "type": "object", "properties": { "flowGraph": { "$ref": "#/components/schemas/FlowGraph" }, "flowGraphLayout": { "$ref": "#/components/schemas/FlowGraphLayout" }, "flowName": { "type": "string", "nullable": true }, "flowRunResourceId": { "type": "string", "nullable": true }, "flowRunId": { "type": "string", "nullable": true }, "flowRunDisplayName": { "type": "string", "nullable": true }, "batchInputs": { "type": "array", "items": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary" }, "nullable": true }, "batchDataInput": { "$ref": "#/components/schemas/BatchDataInput" }, "flowRunType": { "$ref": "#/components/schemas/FlowRunTypeEnum" }, "flowType": { "$ref": "#/components/schemas/FlowType" }, "runtimeName": { "type": "string", "nullable": true }, "bulkTestId": { "type": "string", "nullable": true }, "createdBy": { "$ref": "#/components/schemas/SchemaContractsCreatedBy" }, "createdOn": { "type": "string", "format": "date-time", "nullable": true }, "inputsMapping": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "outputDatastoreName": { "type": "string", "nullable": true }, "childRunBasePath": { "type": "string", "nullable": true }, "workingDirectory": { "type": "string", "nullable": true }, "flowDagFileRelativePath": { "type": "string", "nullable": true }, "flowSnapshotId": { "type": "string", "nullable": true }, "studioPortalEndpoint": { "type": "string", "nullable": true } }, "additionalProperties": false }, "FlowRunMode": { "enum": [ "Flow", "SingleNode", "FromNode", "BulkTest", "Eval", "PairwiseEval" ], "type": "string" }, "FlowRunResult": { "type": 
"object", "properties": { "flow_runs": { "type": "array", "items": { }, "nullable": true }, "node_runs": { "type": "array", "items": { }, "nullable": true }, "errorResponse": { "$ref": "#/components/schemas/ErrorResponse" }, "flowName": { "type": "string", "nullable": true }, "flowRunDisplayName": { "type": "string", "nullable": true }, "flowRunId": { "type": "string", "nullable": true }, "flowGraph": { "$ref": "#/components/schemas/FlowGraph" }, "flowGraphLayout": { "$ref": "#/components/schemas/FlowGraphLayout" }, "flowRunResourceId": { "type": "string", "nullable": true }, "bulkTestId": { "type": "string", "nullable": true }, "batchInputs": { "type": "array", "items": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary" }, "nullable": true }, "batchDataInput": { "$ref": "#/components/schemas/BatchDataInput" }, "createdBy": { "$ref": "#/components/schemas/SchemaContractsCreatedBy" }, "createdOn": { "type": "string", "format": "date-time", "nullable": true }, "flowRunType": { "$ref": "#/components/schemas/FlowRunTypeEnum" }, "flowType": { "$ref": "#/components/schemas/FlowType" }, "runtimeName": { "type": "string", "nullable": true }, "amlComputeName": { "type": "string", "nullable": true }, "flowRunLogs": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "flowTestMode": { "$ref": "#/components/schemas/FlowTestMode" }, "flowTestInfos": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowTestInfo" }, "nullable": true }, "workingDirectory": { "type": "string", "nullable": true }, "flowDagFileRelativePath": { "type": "string", "nullable": true }, "flowSnapshotId": { "type": "string", "nullable": true }, "variantRunToEvaluationRunsIdMapping": { "type": "object", "additionalProperties": { "type": "array", "items": { "type": "string" }, "nullable": true }, "nullable": true } }, "additionalProperties": false }, "FlowRunSettings": { "type": "object", "properties": { "flowRunDisplayName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "runMode": { "$ref": "#/components/schemas/FlowRunMode" }, "batch_inputs": { "type": "array", "items": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary" }, "nullable": true }, "batchDataInput": { "$ref": "#/components/schemas/BatchDataInput" }, "tuningNodeNames": { "type": "array", "items": { "type": "string" }, "nullable": true }, "tuningNodeSettings": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/TuningNodeSetting" }, "description": "This is a dictionary", "nullable": true }, "baselineVariantId": { "type": "string", "nullable": true }, "defaultVariantId": { "type": "string", "nullable": true }, "variants": { "type": "object", "additionalProperties": { "type": "array", "items": { "$ref": "#/components/schemas/Node" } }, "description": "This is a dictionary", "nullable": true }, "variantsTools": { "type": "array", "items": { "$ref": "#/components/schemas/Tool" }, "nullable": true }, "variantsCodes": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "nodeName": { "type": "string", "nullable": true }, 
"bulkTestId": { "type": "string", "nullable": true }, "evaluationFlowRunSettings": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/EvaluationFlowRunSettings" }, "description": "This is a dictionary", "nullable": true }, "inputsMapping": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "dataInputs": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "bulkTestFlowId": { "type": "string", "nullable": true }, "bulkTestFlowRunIds": { "type": "array", "items": { "type": "string" }, "nullable": true }, "amlComputeName": { "type": "string", "nullable": true }, "runtimeName": { "type": "string", "nullable": true }, "flowRunOutputDirectory": { "type": "string", "nullable": true } }, "additionalProperties": false }, "FlowRunTypeEnum": { "enum": [ "FlowRun", "EvaluationRun", "PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun" ], "type": "string" }, "FlowRuntimeCapability": { "type": "object", "properties": { "flowFeatures": { "type": "array", "items": { "$ref": "#/components/schemas/FlowFeature" }, "nullable": true } }, "additionalProperties": false }, "FlowRuntimeDto": { "type": "object", "properties": { "runtimeName": { "type": "string", "nullable": true }, "runtimeDescription": { "type": "string", "nullable": true }, "runtimeType": { "$ref": "#/components/schemas/RuntimeType" }, "environment": { "type": "string", "nullable": true }, "status": { "$ref": "#/components/schemas/RuntimeStatusEnum" }, "statusMessage": { "type": "string", "nullable": true }, "error": { "$ref": "#/components/schemas/ErrorResponse" }, "fromExistingEndpoint": { "type": "boolean" }, "endpointName": { "type": "string", "nullable": true }, "fromExistingDeployment": { "type": "boolean" }, "deploymentName": { "type": "string", "nullable": true }, "identity": { "$ref": "#/components/schemas/ManagedServiceIdentity" }, "instanceType": { "type": "string", "nullable": true }, "instanceCount": { "type": "integer", "format": "int32" }, "computeInstanceName": { "type": "string", "nullable": true }, "dockerImage": { "type": "string", "nullable": true }, "publishedPort": { "type": "integer", "format": "int32" }, "targetPort": { "type": "integer", "format": "int32" }, "fromExistingCustomApp": { "type": "boolean" }, "customAppName": { "type": "string", "nullable": true }, "assignedTo": { "$ref": "#/components/schemas/AssignedUser" }, "endpointUrl": { "type": "string", "nullable": true }, "createdOn": { "type": "string", "format": "date-time" }, "modifiedOn": { "type": "string", "format": "date-time" }, "owner": { "$ref": "#/components/schemas/SchemaContractsCreatedBy" } }, "additionalProperties": false }, "FlowRuntimeSubmissionApiVersion": { "enum": [ "Version1", "Version2" ], "type": "string" }, "FlowSampleDto": { "type": "object", "properties": { "sampleResourceId": { "type": "string", "nullable": true }, "section": { "$ref": "#/components/schemas/Section" }, "indexNumber": { "type": "integer", "format": "int32", "nullable": true }, "flowName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "details": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "flow": { "$ref": "#/components/schemas/Flow" }, "flowDefinitionFilePath": { "type": "string", "nullable": true }, "flowType": { "$ref": 
"#/components/schemas/FlowType" }, "flowRunSettings": { "$ref": "#/components/schemas/FlowRunSettings" }, "isArchived": { "type": "boolean" }, "vmSize": { "type": "string", "nullable": true }, "maxIdleTimeSeconds": { "type": "integer", "format": "int64", "nullable": true }, "identity": { "type": "string", "nullable": true } }, "additionalProperties": false }, "FlowSessionDto": { "type": "object", "properties": { "sessionId": { "type": "string", "nullable": true }, "baseImage": { "type": "string", "nullable": true }, "packages": { "type": "array", "items": { "type": "string" }, "nullable": true }, "vmSize": { "type": "string", "nullable": true }, "maxIdleTimeSeconds": { "type": "integer", "format": "int64", "nullable": true }, "flowFeatures": { "type": "array", "items": { "$ref": "#/components/schemas/FlowFeature" }, "nullable": true }, "runtimeName": { "type": "string", "nullable": true }, "runtimeDescription": { "type": "string", "nullable": true }, "runtimeType": { "$ref": "#/components/schemas/RuntimeType" }, "environment": { "type": "string", "nullable": true }, "status": { "$ref": "#/components/schemas/RuntimeStatusEnum" }, "statusMessage": { "type": "string", "nullable": true }, "error": { "$ref": "#/components/schemas/ErrorResponse" }, "fromExistingEndpoint": { "type": "boolean" }, "endpointName": { "type": "string", "nullable": true }, "fromExistingDeployment": { "type": "boolean" }, "deploymentName": { "type": "string", "nullable": true }, "identity": { "$ref": "#/components/schemas/ManagedServiceIdentity" }, "instanceType": { "type": "string", "nullable": true }, "instanceCount": { "type": "integer", "format": "int32" }, "computeInstanceName": { "type": "string", "nullable": true }, "dockerImage": { "type": "string", "nullable": true }, "publishedPort": { "type": "integer", "format": "int32" }, "targetPort": { "type": "integer", "format": "int32" }, "fromExistingCustomApp": { "type": "boolean" }, "customAppName": { "type": "string", "nullable": true }, "assignedTo": { "$ref": "#/components/schemas/AssignedUser" }, "endpointUrl": { "type": "string", "nullable": true }, "createdOn": { "type": "string", "format": "date-time" }, "modifiedOn": { "type": "string", "format": "date-time" }, "owner": { "$ref": "#/components/schemas/SchemaContractsCreatedBy" } }, "additionalProperties": false }, "FlowSnapshot": { "type": "object", "properties": { "inputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowInputDefinition" }, "description": "This is a dictionary", "nullable": true }, "outputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowOutputDefinition" }, "description": "This is a dictionary", "nullable": true }, "nodes": { "type": "array", "items": { "$ref": "#/components/schemas/FlowNode" }, "nullable": true }, "node_variants": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowNodeVariant" }, "description": "This is a dictionary", "nullable": true }, "environment": { "$ref": "#/components/schemas/FlowEnvironment" }, "environment_variables": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary", "nullable": true }, "language": { "$ref": "#/components/schemas/FlowLanguage" } }, "additionalProperties": false }, "FlowSubmitRunSettings": { "type": "object", "properties": { "nodeInputs": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary", "nullable": true }, "flowRunDisplayName": { "type": "string", "nullable": true }, 
"description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "runMode": { "$ref": "#/components/schemas/FlowRunMode" }, "batch_inputs": { "type": "array", "items": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary" }, "nullable": true }, "batchDataInput": { "$ref": "#/components/schemas/BatchDataInput" }, "tuningNodeNames": { "type": "array", "items": { "type": "string" }, "nullable": true }, "tuningNodeSettings": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/TuningNodeSetting" }, "description": "This is a dictionary", "nullable": true }, "baselineVariantId": { "type": "string", "nullable": true }, "defaultVariantId": { "type": "string", "nullable": true }, "variants": { "type": "object", "additionalProperties": { "type": "array", "items": { "$ref": "#/components/schemas/Node" } }, "description": "This is a dictionary", "nullable": true }, "variantsTools": { "type": "array", "items": { "$ref": "#/components/schemas/Tool" }, "nullable": true }, "variantsCodes": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "nodeName": { "type": "string", "nullable": true }, "bulkTestId": { "type": "string", "nullable": true }, "evaluationFlowRunSettings": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/EvaluationFlowRunSettings" }, "description": "This is a dictionary", "nullable": true }, "inputsMapping": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "dataInputs": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "bulkTestFlowId": { "type": "string", "nullable": true }, "bulkTestFlowRunIds": { "type": "array", "items": { "type": "string" }, "nullable": true }, "amlComputeName": { "type": "string", "nullable": true }, "runtimeName": { "type": "string", "nullable": true }, "flowRunOutputDirectory": { "type": "string", "nullable": true } }, "additionalProperties": false }, "FlowTestInfo": { "type": "object", "properties": { "flowRunId": { "type": "string", "nullable": true }, "flowTestStorageSetting": { "$ref": "#/components/schemas/FlowTestStorageSetting" } }, "additionalProperties": false }, "FlowTestMode": { "enum": [ "Sync", "Async" ], "type": "string" }, "FlowTestStorageSetting": { "type": "object", "properties": { "storageAccountName": { "type": "string", "nullable": true }, "blobContainerName": { "type": "string", "nullable": true }, "flowArtifactsRootPath": { "type": "string", "nullable": true }, "outputDatastoreName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "FlowToolSettingParameter": { "type": "object", "properties": { "type": { "type": "array", "items": { "$ref": "#/components/schemas/ValueType" }, "nullable": true }, "default": { "type": "string", "nullable": true }, "advanced": { "type": "boolean", "nullable": true }, "enum": { "type": "array", "items": { }, "nullable": true }, "model_list": { "type": "array", "items": { "type": "string" }, "nullable": true }, "text_box_size": { "type": "integer", "format": "int32", "nullable": true }, "capabilities": { "$ref": 
"#/components/schemas/AzureOpenAIModelCapabilities" }, "allow_manual_entry": { "type": "boolean", "nullable": true } }, "additionalProperties": false }, "FlowToolsDto": { "type": "object", "properties": { "package": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/Tool" }, "description": "This is a dictionary", "nullable": true }, "code": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/Tool" }, "description": "This is a dictionary", "nullable": true }, "errors": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ErrorResponse" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "FlowType": { "enum": [ "Default", "Evaluation", "Chat", "Rag" ], "type": "string" }, "FlowVariantNode": { "type": "object", "properties": { "node": { "$ref": "#/components/schemas/FlowNode" }, "description": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ForecastHorizon": { "type": "object", "properties": { "mode": { "$ref": "#/components/schemas/ForecastHorizonMode" }, "value": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "ForecastHorizonMode": { "enum": [ "Auto", "Custom" ], "type": "string" }, "ForecastingSettings": { "type": "object", "properties": { "countryOrRegionForHolidays": { "type": "string", "nullable": true }, "timeColumnName": { "type": "string", "nullable": true }, "targetLags": { "$ref": "#/components/schemas/TargetLags" }, "targetRollingWindowSize": { "$ref": "#/components/schemas/TargetRollingWindowSize" }, "forecastHorizon": { "$ref": "#/components/schemas/ForecastHorizon" }, "timeSeriesIdColumnNames": { "type": "array", "items": { "type": "string" }, "nullable": true }, "frequency": { "type": "string", "nullable": true }, "featureLags": { "type": "string", "nullable": true }, "seasonality": { "$ref": "#/components/schemas/Seasonality" }, "shortSeriesHandlingConfig": { "$ref": "#/components/schemas/ShortSeriesHandlingConfiguration" }, "useStl": { "$ref": "#/components/schemas/UseStl" }, "targetAggregateFunction": { "$ref": "#/components/schemas/TargetAggregationFunction" }, "cvStepSize": { "type": "integer", "format": "int32", "nullable": true }, "featuresUnknownAtForecastTime": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "Framework": { "enum": [ "Python", "PySpark", "Cntk", "TensorFlow", "PyTorch", "PySparkInteractive", "R" ], "type": "string" }, "Frequency": { "enum": [ "Month", "Week", "Day", "Hour", "Minute" ], "type": "string" }, "GeneralSettings": { "type": "object", "properties": { "primaryMetric": { "$ref": "#/components/schemas/PrimaryMetrics" }, "taskType": { "$ref": "#/components/schemas/TaskType" }, "logVerbosity": { "$ref": "#/components/schemas/LogVerbosity" } }, "additionalProperties": false }, "GeneratePipelineComponentRequest": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "displayName": { "type": "string", "nullable": true }, "moduleScope": { "$ref": "#/components/schemas/ModuleScope" }, "isDeterministic": { "type": "boolean", "nullable": true }, "category": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true }, "setAsDefaultVersion": { "type": "boolean" }, "registryName": { "type": "string", "nullable": true }, "graph": { "$ref": "#/components/schemas/GraphDraftEntity" }, "pipelineRunSettings": { "type": "array", "items": { "$ref": 
"#/components/schemas/RunSettingParameterAssignment" }, "nullable": true }, "moduleNodeRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNodeRunSetting" }, "nullable": true }, "moduleNodeUIInputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNodeUIInputSetting" }, "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "continueRunOnStepFailure": { "type": "boolean", "nullable": true }, "description": { "type": "string", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "enforceRerun": { "type": "boolean", "nullable": true }, "datasetAccessModes": { "$ref": "#/components/schemas/DatasetAccessModes" } }, "additionalProperties": false }, "GenerateToolMetaRequest": { "type": "object", "properties": { "tools": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ToolSourceMeta" }, "description": "This is a dictionary", "nullable": true }, "working_dir": { "type": "string", "nullable": true } }, "additionalProperties": false }, "GetDynamicListRequest": { "type": "object", "properties": { "func_path": { "type": "string", "nullable": true }, "func_kwargs": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "GetRunDataResultDto": { "type": "object", "properties": { "runMetadata": { "$ref": "#/components/schemas/RunDto" }, "runDefinition": { "nullable": true }, "jobSpecification": { "nullable": true }, "systemSettings": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true } }, "additionalProperties": false }, "GetTrainingSessionDto": { "type": "object", "properties": { "properties": { "$ref": "#/components/schemas/SessionProperties" }, "compute": { "$ref": "#/components/schemas/ComputeContract" } }, "additionalProperties": false }, "GlobalJobDispatcherConfiguration": { "type": "object", "properties": { "vmSize": { "type": "array", "items": { "type": "string" }, "nullable": true }, "computeType": { "$ref": "#/components/schemas/GlobalJobDispatcherSupportedComputeType" }, "region": { "type": "array", "items": { "type": "string" }, "nullable": true }, "myResourceOnly": { "type": "boolean" }, "redispatchAllowed": { "type": "boolean", "nullable": true }, "lowPriorityVMTolerant": { "type": "boolean" }, "vcList": { "type": "array", "items": { "type": "string" }, "nullable": true }, "planId": { "type": "string", "nullable": true }, "planRegionId": { "type": "string", "nullable": true }, "vcBlockList": { "type": "array", "items": { "type": "string" }, "nullable": true }, "clusterBlockList": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "GlobalJobDispatcherSupportedComputeType": { "enum": [ "AmlCompute", "AmlK8s" ], "type": "string" }, "GlobsOptions": { "type": "object", "properties": { "globPatterns": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "GraphAnnotationNode": { "type": "object", "properties": { "id": { "type": "string", "nullable": true }, "content": { "type": "string", "nullable": true }, "mentionedNodeNames": { "type": "array", "items": { "type": "string" }, "nullable": true }, "structuredContent": { "type": "string", "nullable": true } }, 
"additionalProperties": false }, "GraphComponentsMode": { "enum": [ "Normal", "AllDesignerBuildin", "ContainsDesignerBuildin" ], "type": "string" }, "GraphControlNode": { "type": "object", "properties": { "id": { "type": "string", "nullable": true }, "controlType": { "$ref": "#/components/schemas/ControlType" }, "controlParameter": { "$ref": "#/components/schemas/ParameterAssignment" }, "runAttribution": { "type": "string", "nullable": true } }, "additionalProperties": false }, "GraphControlReferenceNode": { "type": "object", "properties": { "id": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "comment": { "type": "string", "nullable": true }, "controlFlowType": { "$ref": "#/components/schemas/ControlFlowType" }, "referenceNodeId": { "type": "string", "nullable": true }, "doWhileControlFlowInfo": { "$ref": "#/components/schemas/DoWhileControlFlowInfo" }, "parallelForControlFlowInfo": { "$ref": "#/components/schemas/ParallelForControlFlowInfo" }, "runAttribution": { "type": "string", "nullable": true } }, "additionalProperties": false }, "GraphDatasetNode": { "type": "object", "properties": { "id": { "type": "string", "nullable": true }, "datasetId": { "type": "string", "nullable": true }, "dataPathParameterName": { "type": "string", "nullable": true }, "dataSetDefinition": { "$ref": "#/components/schemas/DataSetDefinition" } }, "additionalProperties": false }, "GraphDatasetsLoadModes": { "enum": [ "SkipDatasetsLoad", "V1RegisteredDataset", "V1SavedDataset", "PersistDatasetsInfo", "SubmissionNeededUpstreamDatasetOnly", "SubmissionNeededInCompleteDatasetOnly", "V2Asset", "Submission", "AllRegisteredData", "AllData" ], "type": "string" }, "GraphDraftEntity": { "type": "object", "properties": { "moduleNodes": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNode" }, "nullable": true }, "datasetNodes": { "type": "array", "items": { "$ref": "#/components/schemas/GraphDatasetNode" }, "nullable": true }, "subGraphNodes": { "type": "array", "items": { "$ref": "#/components/schemas/GraphReferenceNode" }, "nullable": true }, "controlReferenceNodes": { "type": "array", "items": { "$ref": "#/components/schemas/GraphControlReferenceNode" }, "nullable": true }, "controlNodes": { "type": "array", "items": { "$ref": "#/components/schemas/GraphControlNode" }, "nullable": true }, "edges": { "type": "array", "items": { "$ref": "#/components/schemas/GraphEdge" }, "nullable": true }, "entityInterface": { "$ref": "#/components/schemas/EntityInterface" }, "graphLayout": { "$ref": "#/components/schemas/GraphLayout" }, "createdBy": { "$ref": "#/components/schemas/CreatedBy" }, "lastUpdatedBy": { "$ref": "#/components/schemas/CreatedBy" }, "defaultCompute": { "$ref": "#/components/schemas/ComputeSetting" }, "defaultDatastore": { "$ref": "#/components/schemas/DatastoreSetting" }, "defaultCloudPriority": { "$ref": "#/components/schemas/CloudPrioritySetting" }, "extendedProperties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "parentSubGraphModuleIds": { "type": "array", "items": { "type": "string" }, "nullable": true }, "id": { "type": "string", "nullable": true }, "etag": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "GraphEdge": { "type": "object", "properties": { "sourceOutputPort": { "$ref": 
"#/components/schemas/PortInfo" }, "destinationInputPort": { "$ref": "#/components/schemas/PortInfo" } }, "additionalProperties": false }, "GraphLayout": { "type": "object", "properties": { "nodeLayouts": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/NodeLayout" }, "description": "This is a dictionary", "nullable": true }, "extendedData": { "type": "string", "nullable": true }, "annotationNodes": { "type": "array", "items": { "$ref": "#/components/schemas/GraphAnnotationNode" }, "nullable": true }, "id": { "type": "string", "nullable": true }, "etag": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "GraphLayoutCreationInfo": { "type": "object", "properties": { "nodeLayouts": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/NodeLayout" }, "description": "This is a dictionary", "nullable": true }, "extendedData": { "type": "string", "nullable": true }, "annotationNodes": { "type": "array", "items": { "$ref": "#/components/schemas/GraphAnnotationNode" }, "nullable": true } }, "additionalProperties": false }, "GraphModuleNode": { "type": "object", "properties": { "moduleType": { "$ref": "#/components/schemas/ModuleType" }, "runconfig": { "type": "string", "nullable": true }, "id": { "type": "string", "nullable": true }, "moduleId": { "type": "string", "nullable": true }, "comment": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "moduleParameters": { "type": "array", "items": { "$ref": "#/components/schemas/ParameterAssignment" }, "nullable": true }, "moduleMetadataParameters": { "type": "array", "items": { "$ref": "#/components/schemas/ParameterAssignment" }, "nullable": true }, "moduleOutputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/OutputSetting" }, "nullable": true }, "moduleInputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/InputSetting" }, "nullable": true }, "useGraphDefaultCompute": { "type": "boolean" }, "useGraphDefaultDatastore": { "type": "boolean" }, "regenerateOutput": { "type": "boolean" }, "controlInputs": { "type": "array", "items": { "$ref": "#/components/schemas/ControlInput" }, "nullable": true }, "cloudSettings": { "$ref": "#/components/schemas/CloudSettings" }, "executionPhase": { "$ref": "#/components/schemas/ExecutionPhase" }, "runAttribution": { "type": "string", "nullable": true } }, "additionalProperties": false }, "GraphModuleNodeRunSetting": { "type": "object", "properties": { "nodeId": { "type": "string", "nullable": true }, "moduleId": { "type": "string", "nullable": true }, "stepType": { "type": "string", "nullable": true }, "runSettings": { "type": "array", "items": { "$ref": "#/components/schemas/RunSettingParameterAssignment" }, "nullable": true } }, "additionalProperties": false }, "GraphModuleNodeUIInputSetting": { "type": "object", "properties": { "nodeId": { "type": "string", "nullable": true }, "moduleId": { "type": "string", "nullable": true }, "moduleInputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/UIInputSetting" }, "nullable": true } }, "additionalProperties": false }, "GraphNodeStatusInfo": { "type": "object", "properties": { "status": { "$ref": "#/components/schemas/TaskStatusCode" }, "runStatus": { "$ref": "#/components/schemas/RunStatus" }, "isBypassed": { "type": "boolean" }, "hasFailedChildRun": { "type": "boolean" }, 
"partiallyExecuted": { "type": "boolean" }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "aetherStartTime": { "type": "string", "format": "date-time", "nullable": true }, "aetherEndTime": { "type": "string", "format": "date-time", "nullable": true }, "aetherCreationTime": { "type": "string", "format": "date-time", "nullable": true }, "runHistoryStartTime": { "type": "string", "format": "date-time", "nullable": true }, "runHistoryEndTime": { "type": "string", "format": "date-time", "nullable": true }, "runHistoryCreationTime": { "type": "string", "format": "date-time", "nullable": true }, "reuseInfo": { "$ref": "#/components/schemas/TaskReuseInfo" }, "controlFlowInfo": { "$ref": "#/components/schemas/TaskControlFlowInfo" }, "statusCode": { "$ref": "#/components/schemas/TaskStatusCode" }, "statusDetail": { "type": "string", "nullable": true }, "creationTime": { "type": "string", "format": "date-time" }, "scheduleTime": { "type": "string", "format": "date-time", "nullable": true }, "startTime": { "type": "string", "format": "date-time", "nullable": true }, "endTime": { "type": "string", "format": "date-time", "nullable": true }, "requestId": { "type": "string", "nullable": true }, "runId": { "type": "string", "nullable": true }, "dataContainerId": { "type": "string", "nullable": true }, "realTimeLogPath": { "type": "string", "nullable": true }, "hasWarnings": { "type": "boolean" }, "compositeNodeId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "GraphReferenceNode": { "type": "object", "properties": { "graphId": { "type": "string", "nullable": true }, "defaultCompute": { "$ref": "#/components/schemas/ComputeSetting" }, "defaultDatastore": { "$ref": "#/components/schemas/DatastoreSetting" }, "id": { "type": "string", "nullable": true }, "moduleId": { "type": "string", "nullable": true }, "comment": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "moduleParameters": { "type": "array", "items": { "$ref": "#/components/schemas/ParameterAssignment" }, "nullable": true }, "moduleMetadataParameters": { "type": "array", "items": { "$ref": "#/components/schemas/ParameterAssignment" }, "nullable": true }, "moduleOutputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/OutputSetting" }, "nullable": true }, "moduleInputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/InputSetting" }, "nullable": true }, "useGraphDefaultCompute": { "type": "boolean" }, "useGraphDefaultDatastore": { "type": "boolean" }, "regenerateOutput": { "type": "boolean" }, "controlInputs": { "type": "array", "items": { "$ref": "#/components/schemas/ControlInput" }, "nullable": true }, "cloudSettings": { "$ref": "#/components/schemas/CloudSettings" }, "executionPhase": { "$ref": "#/components/schemas/ExecutionPhase" }, "runAttribution": { "type": "string", "nullable": true } }, "additionalProperties": false }, "GraphSdkCodeType": { "enum": [ "Python", "JupyterNotebook", "Unknown" ], "type": "string" }, "HdfsReference": { "type": "object", "properties": { "amlDataStoreName": { "type": "string", "nullable": true }, "relativePath": { "type": "string", "nullable": true } }, "additionalProperties": false }, "HdiClusterComputeInfo": { "type": "object", "properties": { "address": { "type": "string", "nullable": true }, "username": { "type": "string", "nullable": true }, "password": { "type": "string", "nullable": true }, "privateKey": { 
"type": "string", "nullable": true } }, "additionalProperties": false }, "HdiConfiguration": { "type": "object", "properties": { "yarnDeployMode": { "$ref": "#/components/schemas/YarnDeployMode" } }, "additionalProperties": false }, "HdiRunConfiguration": { "type": "object", "properties": { "file": { "type": "string", "nullable": true }, "className": { "type": "string", "nullable": true }, "files": { "type": "array", "items": { "type": "string" }, "nullable": true }, "archives": { "type": "array", "items": { "type": "string" }, "nullable": true }, "jars": { "type": "array", "items": { "type": "string" }, "nullable": true }, "pyFiles": { "type": "array", "items": { "type": "string" }, "nullable": true }, "computeName": { "type": "string", "nullable": true }, "queue": { "type": "string", "nullable": true }, "driverMemory": { "type": "string", "nullable": true }, "driverCores": { "type": "integer", "format": "int32", "nullable": true }, "executorMemory": { "type": "string", "nullable": true }, "executorCores": { "type": "integer", "format": "int32", "nullable": true }, "numberExecutors": { "type": "integer", "format": "int32", "nullable": true }, "conf": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "name": { "type": "string", "nullable": true } }, "additionalProperties": false }, "HistoryConfiguration": { "type": "object", "properties": { "outputCollection": { "type": "boolean", "default": true }, "directoriesToWatch": { "type": "array", "items": { "type": "string" }, "default": [ "logs" ], "nullable": true }, "enableMLflowTracking": { "type": "boolean", "default": true } } }, "HttpStatusCode": { "enum": [ "Continue", "SwitchingProtocols", "Processing", "EarlyHints", "OK", "Created", "Accepted", "NonAuthoritativeInformation", "NoContent", "ResetContent", "PartialContent", "MultiStatus", "AlreadyReported", "IMUsed", "MultipleChoices", "Ambiguous", "MovedPermanently", "Moved", "Found", "Redirect", "SeeOther", "RedirectMethod", "NotModified", "UseProxy", "Unused", "TemporaryRedirect", "RedirectKeepVerb", "PermanentRedirect", "BadRequest", "Unauthorized", "PaymentRequired", "Forbidden", "NotFound", "MethodNotAllowed", "NotAcceptable", "ProxyAuthenticationRequired", "RequestTimeout", "Conflict", "Gone", "LengthRequired", "PreconditionFailed", "RequestEntityTooLarge", "RequestUriTooLong", "UnsupportedMediaType", "RequestedRangeNotSatisfiable", "ExpectationFailed", "MisdirectedRequest", "UnprocessableEntity", "Locked", "FailedDependency", "UpgradeRequired", "PreconditionRequired", "TooManyRequests", "RequestHeaderFieldsTooLarge", "UnavailableForLegalReasons", "InternalServerError", "NotImplemented", "BadGateway", "ServiceUnavailable", "GatewayTimeout", "HttpVersionNotSupported", "VariantAlsoNegotiates", "InsufficientStorage", "LoopDetected", "NotExtended", "NetworkAuthenticationRequired" ], "type": "string" }, "HyperDriveConfiguration": { "type": "object", "properties": { "hyperDriveRunConfig": { "type": "string", "nullable": true }, "primaryMetricGoal": { "type": "string", "nullable": true }, "primaryMetricName": { "type": "string", "nullable": true }, "arguments": { "type": "array", "items": { "$ref": "#/components/schemas/ArgumentAssignment" }, "nullable": true } }, "additionalProperties": false }, "IActionResult": { "type": "object", "additionalProperties": false }, "ICheckableLongRunningOperationResponse": { "type": "object", "properties": { "completionResult": { "$ref": "#/components/schemas/LongRunningNullResponse" }, "location": { 
"type": "string", "format": "uri", "nullable": true }, "operationResult": { "type": "string", "format": "uri", "nullable": true } }, "additionalProperties": false }, "IdentityConfiguration": { "type": "object", "properties": { "type": { "$ref": "#/components/schemas/IdentityType" }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "secret": { "type": "string", "nullable": true } }, "additionalProperties": false }, "IdentitySetting": { "type": "object", "properties": { "type": { "$ref": "#/components/schemas/AEVAIdentityType" }, "clientId": { "type": "string", "format": "uuid", "nullable": true }, "objectId": { "type": "string", "format": "uuid", "nullable": true }, "msiResourceId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "IdentityType": { "enum": [ "Managed", "ServicePrincipal", "AMLToken" ], "type": "string" }, "ImportDataTask": { "type": "object", "properties": { "DataTransferSource": { "$ref": "#/components/schemas/DataTransferSource" } }, "additionalProperties": false }, "IndexedErrorResponse": { "type": "object", "properties": { "code": { "type": "string", "nullable": true }, "errorCodeHierarchy": { "type": "string", "nullable": true }, "message": { "type": "string", "nullable": true }, "time": { "type": "string", "format": "date-time", "nullable": true }, "componentName": { "type": "string", "nullable": true }, "severity": { "type": "integer", "format": "int32", "nullable": true }, "detailsUri": { "type": "string", "format": "uri", "nullable": true }, "referenceCode": { "type": "string", "nullable": true } } }, "InitScriptInfoDto": { "type": "object", "properties": { "dbfs": { "$ref": "#/components/schemas/DbfsStorageInfoDto" } }, "additionalProperties": false }, "InnerErrorDetails": { "type": "object", "properties": { "code": { "type": "string", "nullable": true }, "message": { "type": "string", "nullable": true }, "target": { "type": "string", "nullable": true } }, "additionalProperties": false }, "InnerErrorResponse": { "type": "object", "properties": { "code": { "type": "string", "description": "The error code.", "nullable": true }, "innerError": { "$ref": "#/components/schemas/InnerErrorResponse" } }, "additionalProperties": false, "description": "A nested structure of errors." 
}, "InputAsset": { "type": "object", "properties": { "asset": { "$ref": "#/components/schemas/Asset" }, "mechanism": { "$ref": "#/components/schemas/DeliveryMechanism" }, "environmentVariableName": { "type": "string", "nullable": true }, "pathOnCompute": { "type": "string", "nullable": true }, "overwrite": { "type": "boolean" }, "options": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true } }, "additionalProperties": false }, "InputData": { "type": "object", "properties": { "datasetId": { "type": "string", "nullable": true }, "mode": { "$ref": "#/components/schemas/DataBindingMode" }, "value": { "type": "string", "nullable": true } }, "additionalProperties": false }, "InputDataBinding": { "type": "object", "properties": { "dataId": { "type": "string", "nullable": true }, "pathOnCompute": { "type": "string", "nullable": true }, "mode": { "$ref": "#/components/schemas/DataBindingMode" }, "description": { "type": "string", "nullable": true }, "uri": { "$ref": "#/components/schemas/MfeInternalUriReference" }, "value": { "type": "string", "nullable": true }, "assetUri": { "type": "string", "nullable": true }, "jobInputType": { "$ref": "#/components/schemas/JobInputType" } }, "additionalProperties": false }, "InputDefinition": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "type": { "type": "array", "items": { "$ref": "#/components/schemas/ValueType" }, "nullable": true }, "default": { "nullable": true }, "description": { "type": "string", "nullable": true }, "enum": { "type": "array", "items": { "type": "string" }, "nullable": true }, "enabled_by": { "type": "string", "nullable": true }, "enabled_by_type": { "type": "array", "items": { "$ref": "#/components/schemas/ValueType" }, "nullable": true }, "enabled_by_value": { "type": "array", "items": { }, "nullable": true }, "model_list": { "type": "array", "items": { "type": "string" }, "nullable": true }, "capabilities": { "$ref": "#/components/schemas/AzureOpenAIModelCapabilities" }, "dynamic_list": { "$ref": "#/components/schemas/ToolInputDynamicList" }, "allow_manual_entry": { "type": "boolean" }, "is_multi_select": { "type": "boolean" }, "generated_by": { "$ref": "#/components/schemas/ToolInputGeneratedBy" }, "input_type": { "$ref": "#/components/schemas/InputType" }, "advanced": { "type": "boolean", "nullable": true }, "ui_hints": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "InputOutputPortMetadata": { "type": "object", "properties": { "graphModuleNodeId": { "type": "string", "nullable": true }, "portName": { "type": "string", "nullable": true }, "schema": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "id": { "type": "string", "nullable": true, "readOnly": true } }, "additionalProperties": false }, "InputSetting": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "dataStoreMode": { "$ref": "#/components/schemas/AEVADataStoreMode" }, "pathOnCompute": { "type": "string", "nullable": true }, "options": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "additionalTransformations": { "type": "string", "nullable": true } }, "additionalProperties": false }, "InputType": { "enum": [ "default", "uionly_hidden" ], "type": "string" }, "IntellectualPropertyAccessMode": { "enum": [ "ReadOnly", "ReadWrite" ], "type": 
"string" }, "IntellectualPropertyPublisherInformation": { "type": "object", "properties": { "intellectualPropertyPublisher": { "type": "string", "nullable": true } }, "additionalProperties": false }, "InteractiveConfig": { "type": "object", "properties": { "isSSHEnabled": { "type": "boolean", "nullable": true }, "sshPublicKey": { "type": "string", "nullable": true }, "isIPythonEnabled": { "type": "boolean", "nullable": true }, "isTensorBoardEnabled": { "type": "boolean", "nullable": true }, "interactivePort": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "InteractiveConfiguration": { "type": "object", "properties": { "isSSHEnabled": { "type": "boolean", "nullable": true }, "sshPublicKey": { "type": "string", "nullable": true }, "isIPythonEnabled": { "type": "boolean", "nullable": true }, "isTensorBoardEnabled": { "type": "boolean", "nullable": true }, "interactivePort": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "JobCost": { "type": "object", "properties": { "chargedCpuCoreSeconds": { "type": "number", "format": "double", "nullable": true }, "chargedCpuMemoryMegabyteSeconds": { "type": "number", "format": "double", "nullable": true }, "chargedGpuSeconds": { "type": "number", "format": "double", "nullable": true }, "chargedNodeUtilizationSeconds": { "type": "number", "format": "double", "nullable": true } } }, "JobEndpoint": { "type": "object", "properties": { "type": { "type": "string", "nullable": true }, "port": { "type": "integer", "format": "int32", "nullable": true }, "endpoint": { "type": "string", "nullable": true }, "status": { "type": "string", "nullable": true }, "errorMessage": { "type": "string", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "nodes": { "$ref": "#/components/schemas/MfeInternalNodes" } }, "additionalProperties": false }, "JobInput": { "required": [ "jobInputType" ], "type": "object", "properties": { "jobInputType": { "$ref": "#/components/schemas/JobInputType" }, "description": { "type": "string", "nullable": true } }, "additionalProperties": false }, "JobInputType": { "enum": [ "Dataset", "Uri", "Literal", "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel" ], "type": "string" }, "JobLimitsType": { "enum": [ "Command", "Sweep" ], "type": "string" }, "JobOutput": { "required": [ "jobOutputType" ], "type": "object", "properties": { "jobOutputType": { "$ref": "#/components/schemas/JobOutputType" }, "description": { "type": "string", "nullable": true }, "autoDeleteSetting": { "$ref": "#/components/schemas/AutoDeleteSetting" } }, "additionalProperties": false }, "JobOutputArtifacts": { "type": "object", "properties": { "datastoreId": { "type": "string", "nullable": true }, "path": { "type": "string", "nullable": true } }, "additionalProperties": false }, "JobOutputType": { "enum": [ "Uri", "Dataset", "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel" ], "type": "string" }, "JobProvisioningState": { "enum": [ "Succeeded", "Failed", "Canceled", "InProgress" ], "type": "string" }, "JobScheduleDto": { "type": "object", "properties": { "jobType": { "$ref": "#/components/schemas/JobType" }, "systemData": { "$ref": "#/components/schemas/SystemData" }, "name": { "type": "string", "nullable": true }, "jobDefinitionId": { "type": "string", "nullable": true }, "displayName": { "type": "string", "nullable": true 
}, "triggerType": { "$ref": "#/components/schemas/TriggerType" }, "recurrence": { "$ref": "#/components/schemas/Recurrence" }, "cron": { "$ref": "#/components/schemas/Cron" }, "status": { "$ref": "#/components/schemas/ScheduleStatus" }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "JobStatus": { "enum": [ "NotStarted", "Starting", "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled" ], "type": "string" }, "JobType": { "enum": [ "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML", "Spark", "Base" ], "type": "string" }, "K8sConfiguration": { "type": "object", "properties": { "maxRetryCount": { "type": "integer", "format": "int32", "nullable": true }, "resourceConfiguration": { "$ref": "#/components/schemas/ResourceConfig" }, "priorityConfiguration": { "$ref": "#/components/schemas/PriorityConfig" }, "interactiveConfiguration": { "$ref": "#/components/schemas/InteractiveConfig" } }, "additionalProperties": false }, "KeyType": { "enum": [ "Primary", "Secondary" ], "type": "string" }, "KeyValuePairComponentNameMetaInfoErrorResponse": { "type": "object", "properties": { "key": { "$ref": "#/components/schemas/ComponentNameMetaInfo" }, "value": { "$ref": "#/components/schemas/ErrorResponse" } }, "additionalProperties": false }, "KeyValuePairComponentNameMetaInfoModuleDto": { "type": "object", "properties": { "key": { "$ref": "#/components/schemas/ComponentNameMetaInfo" }, "value": { "$ref": "#/components/schemas/ModuleDto" } }, "additionalProperties": false }, "KeyValuePairStringObject": { "type": "object", "properties": { "key": { "type": "string", "nullable": true }, "value": { "nullable": true } }, "additionalProperties": false }, "KubernetesConfiguration": { "type": "object", "properties": { "instanceType": { "type": "string", "nullable": true } }, "additionalProperties": false }, "Kwarg": { "type": "object", "properties": { "key": { "type": "string", "nullable": true }, "value": { "type": "string", "nullable": true } }, "additionalProperties": false }, "LegacyDataPath": { "type": "object", "properties": { "dataStoreName": { "type": "string", "nullable": true }, "dataStoreMode": { "$ref": "#/components/schemas/AEVADataStoreMode" }, "relativePath": { "type": "string", "nullable": true } }, "additionalProperties": false }, "LimitSettings": { "type": "object", "properties": { "maxTrials": { "type": "integer", "format": "int32", "nullable": true }, "timeout": { "type": "string", "format": "date-span", "nullable": true }, "trialTimeout": { "type": "string", "format": "date-span", "nullable": true }, "maxConcurrentTrials": { "type": "integer", "format": "int32", "nullable": true }, "maxCoresPerTrial": { "type": "integer", "format": "int32", "nullable": true }, "exitScore": { "type": "number", "format": "double", "nullable": true }, "enableEarlyTermination": { "type": "boolean", "nullable": true }, "maxNodes": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "LinkedADBWorkspaceMetadata": { "type": "object", "properties": { "workspaceId": { "type": "string", "nullable": true }, "region": { "type": "string", "nullable": true 
} }, "additionalProperties": false }, "LinkedPipelineInfo": { "type": "object", "properties": { "pipelineType": { "$ref": "#/components/schemas/PipelineType" }, "moduleNodeId": { "type": "string", "nullable": true }, "portName": { "type": "string", "nullable": true }, "linkedPipelineDraftId": { "type": "string", "nullable": true }, "linkedPipelineRunId": { "type": "string", "nullable": true }, "isDirectLink": { "type": "boolean" } }, "additionalProperties": false }, "ListViewType": { "enum": [ "ActiveOnly", "ArchivedOnly", "All" ], "type": "string" }, "LoadFlowAsComponentRequest": { "type": "object", "properties": { "componentName": { "type": "string", "nullable": true }, "componentVersion": { "type": "string", "nullable": true }, "displayName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "isDeterministic": { "type": "boolean" }, "flowDefinitionFilePath": { "type": "string", "nullable": true }, "flowDefinitionResourceId": { "type": "string", "nullable": true }, "flowDefinitionDataStoreName": { "type": "string", "nullable": true }, "flowDefinitionBlobPath": { "type": "string", "nullable": true }, "flowDefinitionDataUri": { "type": "string", "nullable": true }, "nodeVariant": { "type": "string", "nullable": true }, "inputsMapping": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "connections": { "type": "object", "additionalProperties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary" }, "description": "This is a dictionary", "nullable": true }, "environmentVariables": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "runtimeName": { "type": "string", "nullable": true }, "sessionId": { "type": "string", "nullable": true }, "vmSize": { "type": "string", "nullable": true }, "maxIdleTimeSeconds": { "type": "integer", "format": "int64", "nullable": true } }, "additionalProperties": false }, "LogLevel": { "enum": [ "Trace", "Debug", "Information", "Warning", "Error", "Critical", "None" ], "type": "string" }, "LogRunTerminatedEventDto": { "type": "object", "properties": { "nextActionIntervalInSeconds": { "type": "integer", "format": "int32", "nullable": true }, "actionType": { "$ref": "#/components/schemas/ActionType" }, "lastCheckedTime": { "type": "string", "format": "date-time", "nullable": true } }, "additionalProperties": false }, "LogVerbosity": { "enum": [ "NotSet", "Debug", "Info", "Warning", "Error", "Critical" ], "type": "string" }, "LongRunningNullResponse": { "type": "object", "additionalProperties": false }, "LongRunningOperationUriResponse": { "type": "object", "properties": { "location": { "type": "string", "format": "uri", "nullable": true }, "operationResult": { "type": "string", "format": "uri", "nullable": true } }, "additionalProperties": false }, "LongRunningUpdateRegistryComponentRequest": { "type": "object", "properties": { "displayName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, 
"registryName": { "type": "string", "nullable": true }, "componentName": { "type": "string", "nullable": true }, "componentVersion": { "type": "string", "nullable": true }, "updateType": { "$ref": "#/components/schemas/LongRunningUpdateType" } }, "additionalProperties": false }, "LongRunningUpdateType": { "enum": [ "EnableModule", "DisableModule", "UpdateDisplayName", "UpdateDescription", "UpdateTags" ], "type": "string" }, "MLFlowAutologgerState": { "enum": [ "Enabled", "Disabled" ], "type": "string" }, "ManagedServiceIdentity": { "required": [ "type" ], "type": "object", "properties": { "type": { "$ref": "#/components/schemas/ManagedServiceIdentityType" }, "principalId": { "type": "string", "format": "uuid" }, "tenantId": { "type": "string", "format": "uuid" }, "userAssignedIdentities": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/UserAssignedIdentity" }, "nullable": true } }, "additionalProperties": false }, "ManagedServiceIdentityType": { "enum": [ "SystemAssigned", "UserAssigned", "SystemAssignedUserAssigned", "None" ], "type": "string" }, "MavenLibraryDto": { "type": "object", "properties": { "coordinates": { "type": "string", "nullable": true }, "repo": { "type": "string", "nullable": true }, "exclusions": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "MetricProperties": { "type": "object", "properties": { "uxMetricType": { "type": "string", "nullable": true } }, "additionalProperties": false }, "MetricSchemaDto": { "type": "object", "properties": { "numProperties": { "type": "integer", "format": "int32" }, "properties": { "type": "array", "items": { "$ref": "#/components/schemas/MetricSchemaPropertyDto" }, "nullable": true } }, "additionalProperties": false }, "MetricSchemaPropertyDto": { "type": "object", "properties": { "propertyId": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "type": { "type": "string", "nullable": true } }, "additionalProperties": false }, "MetricV2Dto": { "type": "object", "properties": { "dataContainerId": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "columns": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/MetricValueType" }, "description": "This is a dictionary", "nullable": true }, "properties": { "$ref": "#/components/schemas/MetricProperties" }, "namespace": { "type": "string", "nullable": true }, "standardSchemaId": { "type": "string", "format": "uuid", "nullable": true }, "value": { "type": "array", "items": { "$ref": "#/components/schemas/MetricV2Value" }, "nullable": true }, "continuationToken": { "type": "string", "description": "The token used in retrieving the next page. If null, there are no additional pages.", "nullable": true }, "nextLink": { "type": "string", "description": "The link to the next page constructed using the continuationToken. 
If null, there are no additional pages.", "nullable": true } }, "additionalProperties": false }, "MetricV2Value": { "type": "object", "properties": { "metricId": { "type": "string", "nullable": true }, "createdUtc": { "type": "string", "format": "date-time" }, "step": { "type": "integer", "format": "int64", "nullable": true }, "data": { "type": "object", "additionalProperties": { "nullable": true }, "nullable": true }, "sasUri": { "type": "string", "format": "uri", "nullable": true } }, "additionalProperties": false }, "MetricValueType": { "enum": [ "Int", "Double", "String", "Bool", "Artifact", "Histogram", "Malformed" ], "type": "string" }, "MfeInternalAutologgerSettings": { "type": "object", "properties": { "mlflowAutologger": { "$ref": "#/components/schemas/MfeInternalMLFlowAutologgerState" } }, "additionalProperties": false }, "MfeInternalIdentityConfiguration": { "type": "object", "properties": { "identityType": { "$ref": "#/components/schemas/MfeInternalIdentityType" } }, "additionalProperties": false }, "MfeInternalIdentityType": { "enum": [ "Managed", "AMLToken", "UserIdentity" ], "type": "string" }, "MfeInternalMLFlowAutologgerState": { "enum": [ "Enabled", "Disabled" ], "type": "string" }, "MfeInternalNodes": { "type": "object", "properties": { "nodesValueType": { "$ref": "#/components/schemas/MfeInternalNodesValueType" } }, "additionalProperties": false }, "MfeInternalNodesValueType": { "enum": [ "All" ], "type": "string" }, "MfeInternalOutputData": { "type": "object", "properties": { "datasetName": { "type": "string", "nullable": true }, "datastore": { "type": "string", "nullable": true }, "datapath": { "type": "string", "nullable": true }, "mode": { "$ref": "#/components/schemas/DataBindingMode" } }, "additionalProperties": false }, "MfeInternalPipelineType": { "enum": [ "AzureML" ], "type": "string" }, "MfeInternalScheduleStatus": { "enum": [ "Enabled", "Disabled" ], "type": "string" }, "MfeInternalSecretConfiguration": { "type": "object", "properties": { "workspaceSecretName": { "type": "string", "nullable": true }, "uri": { "type": "string", "nullable": true } }, "additionalProperties": false }, "MfeInternalUriReference": { "type": "object", "properties": { "file": { "type": "string", "nullable": true }, "folder": { "type": "string", "nullable": true } }, "additionalProperties": false }, "MfeInternalV20211001ComponentJob": { "type": "object", "properties": { "computeId": { "type": "string", "nullable": true }, "componentId": { "type": "string", "nullable": true }, "inputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/JobInput" }, "description": "This is a dictionary", "nullable": true }, "outputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/JobOutput" }, "description": "This is a dictionary", "nullable": true }, "overrides": { "nullable": true } }, "additionalProperties": false }, "MinMaxParameterRule": { "type": "object", "properties": { "min": { "type": "number", "format": "double", "nullable": true }, "max": { "type": "number", "format": "double", "nullable": true } }, "additionalProperties": false }, "MlcComputeInfo": { "type": "object", "properties": { "mlcComputeType": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ModelDto": { "type": "object", "properties": { "feedName": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "amlDataStoreName": { "type": "string", "nullable": true }, 
"relativePath": { "type": "string", "nullable": true }, "id": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true }, "systemData": { "$ref": "#/components/schemas/SystemData" }, "armId": { "type": "string", "nullable": true }, "onlineEndpointYamlStr": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ModelManagementErrorResponse": { "type": "object", "properties": { "code": { "type": "string", "nullable": true }, "statusCode": { "type": "integer", "format": "int32" }, "message": { "type": "string", "nullable": true }, "target": { "type": "string", "nullable": true }, "details": { "type": "array", "items": { "$ref": "#/components/schemas/InnerErrorDetails" }, "nullable": true }, "correlation": { "type": "object", "additionalProperties": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "ModifyPipelineJobScheduleDto": { "type": "object", "properties": { "pipelineJobName": { "type": "string", "nullable": true }, "pipelineJobRuntimeSettings": { "$ref": "#/components/schemas/PipelineJobRuntimeBasicSettings" }, "displayName": { "type": "string", "nullable": true }, "triggerType": { "$ref": "#/components/schemas/TriggerType" }, "recurrence": { "$ref": "#/components/schemas/Recurrence" }, "cron": { "$ref": "#/components/schemas/Cron" }, "status": { "$ref": "#/components/schemas/ScheduleStatus" }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "ModuleDto": { "type": "object", "properties": { "namespace": { "type": "string", "nullable": true }, "tags": { "type": "array", "items": { "type": "string" }, "nullable": true }, "displayName": { "type": "string", "nullable": true }, "dictTags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "moduleVersionId": { "type": "string", "nullable": true }, "feedName": { "type": "string", "nullable": true }, "registryName": { "type": "string", "nullable": true }, "moduleName": { "type": "string", "nullable": true }, "moduleVersion": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "owner": { "type": "string", "nullable": true }, "jobType": { "type": "string", "nullable": true }, "defaultVersion": { "type": "string", "nullable": true }, "familyId": { "type": "string", "nullable": true }, "helpDocument": { "type": "string", "nullable": true }, "codegenBy": { "type": "string", "nullable": true }, "armId": { "type": "string", "nullable": true }, "moduleScope": { "$ref": "#/components/schemas/ModuleScope" }, "moduleEntity": { "$ref": "#/components/schemas/ModuleEntity" }, "inputTypes": { "type": "array", "items": { "type": "string" }, "nullable": true }, "outputTypes": { "type": "array", "items": { "type": "string" }, "nullable": true }, "entityStatus": { "$ref": "#/components/schemas/EntityStatus" }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" }, "yamlLink": { "type": "string", "nullable": true }, "yamlLinkWithCommitSha": { "type": "string", "nullable": true }, "moduleSourceType": { "$ref": "#/components/schemas/ModuleSourceType" }, "registeredBy": { "type": "string", "nullable": true }, 
"versions": { "type": "array", "items": { "$ref": "#/components/schemas/AzureMLModuleVersionDescriptor" }, "nullable": true }, "isDefaultModuleVersion": { "type": "boolean", "nullable": true }, "systemData": { "$ref": "#/components/schemas/SystemData" }, "systemMeta": { "$ref": "#/components/schemas/SystemMeta" }, "snapshotId": { "type": "string", "nullable": true }, "entry": { "type": "string", "nullable": true }, "osType": { "type": "string", "nullable": true }, "requireGpu": { "type": "boolean", "nullable": true }, "modulePythonInterface": { "$ref": "#/components/schemas/ModulePythonInterface" }, "environmentAssetId": { "type": "string", "nullable": true }, "runSettingParameters": { "type": "array", "items": { "$ref": "#/components/schemas/RunSettingParameter" }, "nullable": true }, "supportedUIInputDataDeliveryModes": { "type": "object", "additionalProperties": { "type": "array", "items": { "$ref": "#/components/schemas/UIInputDataDeliveryMode" }, "nullable": true }, "nullable": true }, "outputSettingSpecs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/OutputSettingSpec" }, "nullable": true }, "yamlStr": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ModuleDtoFields": { "enum": [ "Definition", "YamlStr", "RegistrationContext", "RunSettingParameters", "RunDefinition", "All", "Default", "Basic", "Minimal" ], "type": "string" }, "ModuleDtoWithErrors": { "type": "object", "properties": { "versionIdToModuleDto": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ModuleDto" }, "description": "This is a dictionary", "nullable": true }, "nameAndVersionToModuleDto": { "type": "array", "items": { "$ref": "#/components/schemas/KeyValuePairComponentNameMetaInfoModuleDto" }, "nullable": true }, "versionIdToError": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ErrorResponse" }, "description": "This is a dictionary", "nullable": true }, "nameAndVersionToError": { "type": "array", "items": { "$ref": "#/components/schemas/KeyValuePairComponentNameMetaInfoErrorResponse" }, "nullable": true } }, "additionalProperties": false }, "ModuleDtoWithValidateStatus": { "type": "object", "properties": { "existingModuleEntity": { "$ref": "#/components/schemas/ModuleEntity" }, "status": { "$ref": "#/components/schemas/ModuleInfoFromYamlStatusEnum" }, "statusDetails": { "type": "string", "nullable": true }, "errorDetails": { "type": "array", "items": { "type": "string" }, "nullable": true }, "serializedModuleInfo": { "type": "string", "nullable": true }, "namespace": { "type": "string", "nullable": true }, "tags": { "type": "array", "items": { "type": "string" }, "nullable": true }, "displayName": { "type": "string", "nullable": true }, "dictTags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "moduleVersionId": { "type": "string", "nullable": true }, "feedName": { "type": "string", "nullable": true }, "registryName": { "type": "string", "nullable": true }, "moduleName": { "type": "string", "nullable": true }, "moduleVersion": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "owner": { "type": "string", "nullable": true }, "jobType": { "type": "string", "nullable": true }, "defaultVersion": { "type": "string", "nullable": true }, "familyId": { "type": "string", "nullable": true }, "helpDocument": { "type": "string", "nullable": true }, "codegenBy": { "type": "string", "nullable": true }, "armId": { 
"type": "string", "nullable": true }, "moduleScope": { "$ref": "#/components/schemas/ModuleScope" }, "moduleEntity": { "$ref": "#/components/schemas/ModuleEntity" }, "inputTypes": { "type": "array", "items": { "type": "string" }, "nullable": true }, "outputTypes": { "type": "array", "items": { "type": "string" }, "nullable": true }, "entityStatus": { "$ref": "#/components/schemas/EntityStatus" }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" }, "yamlLink": { "type": "string", "nullable": true }, "yamlLinkWithCommitSha": { "type": "string", "nullable": true }, "moduleSourceType": { "$ref": "#/components/schemas/ModuleSourceType" }, "registeredBy": { "type": "string", "nullable": true }, "versions": { "type": "array", "items": { "$ref": "#/components/schemas/AzureMLModuleVersionDescriptor" }, "nullable": true }, "isDefaultModuleVersion": { "type": "boolean", "nullable": true }, "systemData": { "$ref": "#/components/schemas/SystemData" }, "systemMeta": { "$ref": "#/components/schemas/SystemMeta" }, "snapshotId": { "type": "string", "nullable": true }, "entry": { "type": "string", "nullable": true }, "osType": { "type": "string", "nullable": true }, "requireGpu": { "type": "boolean", "nullable": true }, "modulePythonInterface": { "$ref": "#/components/schemas/ModulePythonInterface" }, "environmentAssetId": { "type": "string", "nullable": true }, "runSettingParameters": { "type": "array", "items": { "$ref": "#/components/schemas/RunSettingParameter" }, "nullable": true }, "supportedUIInputDataDeliveryModes": { "type": "object", "additionalProperties": { "type": "array", "items": { "$ref": "#/components/schemas/UIInputDataDeliveryMode" }, "nullable": true }, "nullable": true }, "outputSettingSpecs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/OutputSettingSpec" }, "nullable": true }, "yamlStr": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ModuleEntity": { "type": "object", "properties": { "displayName": { "type": "string", "nullable": true }, "moduleExecutionType": { "type": "string", "nullable": true }, "moduleType": { "$ref": "#/components/schemas/ModuleType" }, "moduleTypeVersion": { "type": "string", "nullable": true }, "uploadState": { "$ref": "#/components/schemas/UploadState" }, "isDeterministic": { "type": "boolean" }, "structuredInterface": { "$ref": "#/components/schemas/StructuredInterface" }, "dataLocation": { "$ref": "#/components/schemas/DataLocation" }, "identifierHash": { "type": "string", "nullable": true }, "identifierHashV2": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "createdBy": { "$ref": "#/components/schemas/CreatedBy" }, "lastUpdatedBy": { "$ref": "#/components/schemas/CreatedBy" }, "runconfig": { "type": "string", "nullable": true }, "cloudSettings": { "$ref": "#/components/schemas/CloudSettings" }, "category": { "type": "string", "nullable": true }, "stepType": { "type": "string", "nullable": true }, "stage": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "hash": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "entityStatus": { "$ref": "#/components/schemas/EntityStatus" }, "id": { 
"type": "string", "nullable": true }, "etag": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "ModuleInfoFromYamlStatusEnum": { "enum": [ "NewModule", "NewVersion", "Conflict", "ParseError", "ProcessRequestError" ], "type": "string" }, "ModulePythonInterface": { "type": "object", "properties": { "inputs": { "type": "array", "items": { "$ref": "#/components/schemas/PythonInterfaceMapping" }, "nullable": true }, "outputs": { "type": "array", "items": { "$ref": "#/components/schemas/PythonInterfaceMapping" }, "nullable": true }, "parameters": { "type": "array", "items": { "$ref": "#/components/schemas/PythonInterfaceMapping" }, "nullable": true } }, "additionalProperties": false }, "ModuleRunSettingTypes": { "enum": [ "All", "Released", "Default", "Testing", "Legacy", "Preview", "UxFull", "Integration", "UxIntegration", "Full" ], "type": "string" }, "ModuleScope": { "enum": [ "All", "Global", "Workspace", "Anonymous", "Step", "Draft", "Feed", "Registry", "SystemAutoCreated" ], "type": "string" }, "ModuleSourceType": { "enum": [ "Unknown", "Local", "GithubFile", "GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo" ], "type": "string" }, "ModuleType": { "enum": [ "None", "BatchInferencing" ], "type": "string" }, "ModuleUpdateOperationType": { "enum": [ "SetDefaultVersion", "EnableModule", "DisableModule", "UpdateDisplayName", "UpdateDescription", "UpdateTags" ], "type": "string" }, "ModuleWorkingMechanism": { "enum": [ "Normal", "OutputToDataset" ], "type": "string" }, "MpiConfiguration": { "type": "object", "properties": { "processCountPerNode": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "NCrossValidationMode": { "enum": [ "Auto", "Custom" ], "type": "string" }, "NCrossValidations": { "type": "object", "properties": { "mode": { "$ref": "#/components/schemas/NCrossValidationMode" }, "value": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "Node": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "type": { "$ref": "#/components/schemas/ToolType" }, "source": { "$ref": "#/components/schemas/NodeSource" }, "inputs": { "type": "object", "additionalProperties": { "nullable": true }, "nullable": true }, "tool": { "type": "string", "nullable": true }, "reduce": { "type": "boolean" }, "activate": { "$ref": "#/components/schemas/Activate" }, "comment": { "type": "string", "nullable": true }, "api": { "type": "string", "nullable": true }, "provider": { "type": "string", "nullable": true }, "connection": { "type": "string", "nullable": true }, "module": { "type": "string", "nullable": true } }, "additionalProperties": false }, "NodeCompositionMode": { "enum": [ "None", "OnlySequential", "Full" ], "type": "string" }, "NodeInputPort": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "documentation": { "type": "string", "nullable": true }, "dataTypesIds": { "type": "array", "items": { "type": "string" }, "nullable": true }, "isOptional": { "type": "boolean" } }, "additionalProperties": false }, "NodeLayout": { "type": "object", "properties": { "x": { "type": "number", "format": "float" }, "y": { "type": "number", "format": "float" }, "width": { "type": "number", "format": "float" }, "height": { "type": "number", "format": "float" }, "extendedData": { "type": "string", "nullable": true } }, "additionalProperties": false }, 
"NodeOutputPort": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "documentation": { "type": "string", "nullable": true }, "dataTypeId": { "type": "string", "nullable": true }, "passThroughInputName": { "type": "string", "nullable": true }, "EarlyAvailable": { "type": "boolean" }, "dataStoreMode": { "$ref": "#/components/schemas/AEVADataStoreMode" } }, "additionalProperties": false }, "NodePortInterface": { "type": "object", "properties": { "inputs": { "type": "array", "items": { "$ref": "#/components/schemas/NodeInputPort" }, "nullable": true }, "outputs": { "type": "array", "items": { "$ref": "#/components/schemas/NodeOutputPort" }, "nullable": true }, "controlOutputs": { "type": "array", "items": { "$ref": "#/components/schemas/ControlOutput" }, "nullable": true } }, "additionalProperties": false }, "NodeSource": { "type": "object", "properties": { "type": { "type": "string", "nullable": true }, "tool": { "type": "string", "nullable": true }, "path": { "type": "string", "nullable": true } }, "additionalProperties": false }, "NodeTelemetryMetaInfo": { "type": "object", "properties": { "pipelineRunId": { "type": "string", "nullable": true }, "nodeId": { "type": "string", "nullable": true }, "versionId": { "type": "string", "nullable": true }, "nodeType": { "type": "string", "nullable": true }, "nodeSource": { "type": "string", "nullable": true }, "isAnonymous": { "type": "boolean" }, "isPipelineComponent": { "type": "boolean" } }, "additionalProperties": false }, "NodeVariant": { "type": "object", "properties": { "variants": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/VariantNode" }, "description": "This is a dictionary", "nullable": true }, "defaultVariantId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "Nodes": { "required": [ "nodes_value_type" ], "type": "object", "properties": { "nodes_value_type": { "$ref": "#/components/schemas/NodesValueType" }, "values": { "type": "array", "items": { "type": "integer", "format": "int32" }, "nullable": true } }, "additionalProperties": false }, "NodesValueType": { "enum": [ "All", "Custom" ], "type": "string" }, "NoteBookTaskDto": { "type": "object", "properties": { "notebook_path": { "type": "string", "nullable": true }, "base_parameters": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true } }, "additionalProperties": false }, "NotificationSetting": { "type": "object", "properties": { "emails": { "type": "array", "items": { "type": "string" }, "nullable": true }, "emailOn": { "type": "array", "items": { "$ref": "#/components/schemas/EmailNotificationEnableType" }, "nullable": true }, "webhooks": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/Webhook" }, "nullable": true } }, "additionalProperties": false }, "ODataError": { "type": "object", "properties": { "code": { "type": "string", "description": "Gets or sets a language-independent, service-defined error code.\r\nThis code serves as a sub-status for the HTTP error code specified\r\nin the response.", "nullable": true }, "message": { "type": "string", "description": "Gets or sets a human-readable, language-dependent representation of the error.\r\nThe `Content-Language` header MUST contain the language code from [RFC5646]\r\ncorresponding to the language in which the value for message is written.", "nullable": true }, "target": { "type": "string", "description": "Gets or sets the target of the particular error\r\n(for 
example, the name of the property in error).", "nullable": true }, "details": { "type": "array", "items": { "$ref": "#/components/schemas/ODataErrorDetail" }, "description": "Gets or sets additional details about the error.", "nullable": true }, "innererror": { "$ref": "#/components/schemas/ODataInnerError" } }, "additionalProperties": false, "description": "Represents OData v4 error object." }, "ODataErrorDetail": { "type": "object", "properties": { "code": { "type": "string", "description": "Gets or sets a language-independent, service-defined error code.", "nullable": true }, "message": { "type": "string", "description": "Gets or sets a human-readable, language-dependent representation of the error.", "nullable": true }, "target": { "type": "string", "description": "Gets or sets the target of the particular error\r\n(for example, the name of the property in error).", "nullable": true } }, "additionalProperties": false, "description": "Represents additional error details." }, "ODataErrorResponse": { "type": "object", "properties": { "error": { "$ref": "#/components/schemas/ODataError" } }, "additionalProperties": false, "description": "Represents OData v4 compliant error response message." }, "ODataInnerError": { "type": "object", "properties": { "clientRequestId": { "type": "string", "description": "Gets or sets the client provided request ID.", "nullable": true }, "serviceRequestId": { "type": "string", "description": "Gets or sets the server generated request ID.", "nullable": true }, "trace": { "type": "string", "description": "Gets or sets the exception stack trace.\r\nDO NOT INCLUDE IT IN PRODUCTION ENVIRONMENT.", "nullable": true }, "context": { "type": "string", "description": "Gets or sets additional context for the exception.\r\nDO NOT INCLUDE IT IN PRODUCTION ENVIRONMENT.", "nullable": true } }, "additionalProperties": false, "description": "The contents of this object are service-defined.\r\nUsually this object contains information that will help debug the service\r\nand SHOULD only be used in development environments in order to guard\r\nagainst potential security concerns around information disclosure." 
}, "Orientation": { "enum": [ "Horizontal", "Vertical" ], "type": "string" }, "OutputData": { "type": "object", "properties": { "outputLocation": { "$ref": "#/components/schemas/ExecutionDataLocation" }, "mechanism": { "$ref": "#/components/schemas/OutputMechanism" }, "additionalOptions": { "$ref": "#/components/schemas/OutputOptions" }, "environmentVariableName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "OutputDataBinding": { "type": "object", "properties": { "datastoreId": { "type": "string", "nullable": true }, "pathOnDatastore": { "type": "string", "nullable": true }, "pathOnCompute": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "uri": { "$ref": "#/components/schemas/MfeInternalUriReference" }, "mode": { "$ref": "#/components/schemas/DataBindingMode" }, "assetUri": { "type": "string", "nullable": true }, "isAssetJobOutput": { "type": "boolean", "nullable": true }, "jobOutputType": { "$ref": "#/components/schemas/JobOutputType" }, "assetName": { "type": "string", "nullable": true }, "assetVersion": { "type": "string", "nullable": true }, "autoDeleteSetting": { "$ref": "#/components/schemas/AutoDeleteSetting" } }, "additionalProperties": false }, "OutputDatasetLineage": { "type": "object", "properties": { "identifier": { "$ref": "#/components/schemas/DatasetIdentifier" }, "outputType": { "$ref": "#/components/schemas/DatasetOutputType" }, "outputDetails": { "$ref": "#/components/schemas/DatasetOutputDetails" } }, "additionalProperties": false }, "OutputDefinition": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "type": { "type": "array", "items": { "$ref": "#/components/schemas/ValueType" }, "nullable": true }, "description": { "type": "string", "nullable": true }, "isProperty": { "type": "boolean" } }, "additionalProperties": false }, "OutputMechanism": { "enum": [ "Upload", "Mount", "Hdfs", "Link", "Direct" ], "type": "string" }, "OutputOptions": { "type": "object", "properties": { "pathOnCompute": { "type": "string", "nullable": true }, "registrationOptions": { "$ref": "#/components/schemas/RegistrationOptions" }, "uploadOptions": { "$ref": "#/components/schemas/UploadOptions" }, "mountOptions": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true } }, "additionalProperties": false }, "OutputSetting": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "dataStoreName": { "type": "string", "nullable": true }, "DataStoreNameParameterAssignment": { "$ref": "#/components/schemas/ParameterAssignment" }, "dataStoreMode": { "$ref": "#/components/schemas/AEVADataStoreMode" }, "DataStoreModeParameterAssignment": { "$ref": "#/components/schemas/ParameterAssignment" }, "pathOnCompute": { "type": "string", "nullable": true }, "PathOnComputeParameterAssignment": { "$ref": "#/components/schemas/ParameterAssignment" }, "overwrite": { "type": "boolean" }, "dataReferenceName": { "type": "string", "nullable": true }, "webServicePort": { "type": "string", "nullable": true }, "datasetRegistration": { "$ref": "#/components/schemas/DatasetRegistration" }, "datasetOutputOptions": { "$ref": "#/components/schemas/DatasetOutputOptions" }, "AssetOutputSettings": { "$ref": "#/components/schemas/AssetOutputSettings" }, "parameterName": { "type": "string", "nullable": true }, "AssetOutputSettingsParameterName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "OutputSettingSpec": { "type": 
"object", "properties": { "supportedDataStoreModes": { "type": "array", "items": { "$ref": "#/components/schemas/AEVADataStoreMode" }, "nullable": true }, "defaultAssetOutputPath": { "type": "string", "nullable": true } }, "additionalProperties": false }, "PaginatedDataInfoList": { "type": "object", "properties": { "value": { "type": "array", "items": { "$ref": "#/components/schemas/DataInfo" }, "description": "An array of objects of type DataInfo.", "nullable": true }, "continuationToken": { "type": "string", "description": "The token used in retrieving the next page. If null, there are no additional pages.", "nullable": true }, "nextLink": { "type": "string", "description": "The link to the next page constructed using the continuationToken. If null, there are no additional pages.", "nullable": true } }, "additionalProperties": false, "description": "A paginated list of DataInfos." }, "PaginatedModelDtoList": { "type": "object", "properties": { "value": { "type": "array", "items": { "$ref": "#/components/schemas/ModelDto" }, "description": "An array of objects of type ModelDto.", "nullable": true }, "continuationToken": { "type": "string", "description": "The token used in retrieving the next page. If null, there are no additional pages.", "nullable": true }, "nextLink": { "type": "string", "description": "The link to the next page constructed using the continuationToken. If null, there are no additional pages.", "nullable": true } }, "additionalProperties": false, "description": "A paginated list of ModelDtos." }, "PaginatedModuleDtoList": { "type": "object", "properties": { "value": { "type": "array", "items": { "$ref": "#/components/schemas/ModuleDto" }, "description": "An array of objects of type ModuleDto.", "nullable": true }, "continuationToken": { "type": "string", "description": "The token used in retrieving the next page. If null, there are no additional pages.", "nullable": true }, "nextLink": { "type": "string", "description": "The link to the next page constructed using the continuationToken. If null, there are no additional pages.", "nullable": true } }, "additionalProperties": false, "description": "A paginated list of ModuleDtos." }, "PaginatedPipelineDraftSummaryList": { "type": "object", "properties": { "value": { "type": "array", "items": { "$ref": "#/components/schemas/PipelineDraftSummary" }, "description": "An array of objects of type PipelineDraftSummary.", "nullable": true }, "continuationToken": { "type": "string", "description": "The token used in retrieving the next page. If null, there are no additional pages.", "nullable": true }, "nextLink": { "type": "string", "description": "The link to the next page constructed using the continuationToken. If null, there are no additional pages.", "nullable": true } }, "additionalProperties": false, "description": "A paginated list of PipelineDraftSummarys." }, "PaginatedPipelineEndpointSummaryList": { "type": "object", "properties": { "value": { "type": "array", "items": { "$ref": "#/components/schemas/PipelineEndpointSummary" }, "description": "An array of objects of type PipelineEndpointSummary.", "nullable": true }, "continuationToken": { "type": "string", "description": "The token used in retrieving the next page. If null, there are no additional pages.", "nullable": true }, "nextLink": { "type": "string", "description": "The link to the next page constructed using the continuationToken. 
If null, there are no additional pages.", "nullable": true } }, "additionalProperties": false, "description": "A paginated list of PipelineEndpointSummarys." }, "PaginatedPipelineRunSummaryList": { "type": "object", "properties": { "value": { "type": "array", "items": { "$ref": "#/components/schemas/PipelineRunSummary" }, "description": "An array of objects of type PipelineRunSummary.", "nullable": true }, "continuationToken": { "type": "string", "description": "The token used in retrieving the next page. If null, there are no additional pages.", "nullable": true }, "nextLink": { "type": "string", "description": "The link to the next page constructed using the continuationToken. If null, there are no additional pages.", "nullable": true } }, "additionalProperties": false, "description": "A paginated list of PipelineRunSummarys." }, "PaginatedPublishedPipelineSummaryList": { "type": "object", "properties": { "value": { "type": "array", "items": { "$ref": "#/components/schemas/PublishedPipelineSummary" }, "description": "An array of objects of type PublishedPipelineSummary.", "nullable": true }, "continuationToken": { "type": "string", "description": "The token used in retrieving the next page. If null, there are no additional pages.", "nullable": true }, "nextLink": { "type": "string", "description": "The link to the next page constructed using the continuationToken. If null, there are no additional pages.", "nullable": true } }, "additionalProperties": false, "description": "A paginated list of PublishedPipelineSummarys." }, "ParallelForControlFlowInfo": { "type": "object", "properties": { "parallelForItemsInput": { "$ref": "#/components/schemas/ParameterAssignment" } }, "additionalProperties": false }, "ParallelTaskConfiguration": { "type": "object", "properties": { "maxRetriesPerWorker": { "type": "integer", "format": "int32" }, "workerCountPerNode": { "type": "integer", "format": "int32" }, "terminalExitCodes": { "type": "array", "items": { "type": "integer", "format": "int32" }, "nullable": true }, "configuration": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true } }, "additionalProperties": false }, "Parameter": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "documentation": { "type": "string", "nullable": true }, "defaultValue": { "type": "string", "nullable": true }, "isOptional": { "type": "boolean" }, "minMaxRules": { "type": "array", "items": { "$ref": "#/components/schemas/MinMaxParameterRule" }, "nullable": true }, "enumRules": { "type": "array", "items": { "$ref": "#/components/schemas/EnumParameterRule" }, "nullable": true }, "type": { "$ref": "#/components/schemas/ParameterType" }, "label": { "type": "string", "nullable": true }, "groupNames": { "type": "array", "items": { "type": "string" }, "nullable": true }, "argumentName": { "type": "string", "nullable": true }, "uiHint": { "$ref": "#/components/schemas/UIParameterHint" } }, "additionalProperties": false }, "ParameterAssignment": { "type": "object", "properties": { "valueType": { "$ref": "#/components/schemas/ParameterValueType" }, "assignmentsToConcatenate": { "type": "array", "items": { "$ref": "#/components/schemas/ParameterAssignment" }, "nullable": true }, "dataPathAssignment": { "$ref": "#/components/schemas/LegacyDataPath" }, "dataSetDefinitionValueAssignment": { "$ref": "#/components/schemas/DataSetDefinitionValue" }, "name": { "type": "string", "nullable": true }, "value": { "type": "string", "nullable": true } }, 
"additionalProperties": false }, "ParameterDefinition": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "type": { "type": "string", "nullable": true }, "value": { "type": "string", "nullable": true }, "isOptional": { "type": "boolean" } }, "additionalProperties": false }, "ParameterType": { "enum": [ "Int", "Double", "Bool", "String", "Undefined" ], "type": "string" }, "ParameterValueType": { "enum": [ "Literal", "GraphParameterName", "Concatenate", "Input", "DataPath", "DataSetDefinition" ], "type": "string" }, "PatchFlowRequest": { "type": "object", "properties": { "flowPatchOperationType": { "$ref": "#/components/schemas/FlowPatchOperationType" }, "flowDefinitionFilePath": { "type": "string", "nullable": true } }, "additionalProperties": false }, "Pipeline": { "type": "object", "properties": { "runId": { "type": "string", "nullable": true }, "continueRunOnStepFailure": { "type": "boolean" }, "defaultDatastoreName": { "type": "string", "nullable": true }, "componentJobs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ComponentJob" }, "description": "This is a dictionary", "nullable": true }, "inputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/PipelineInput" }, "description": "This is a dictionary", "nullable": true }, "outputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/PipelineOutput" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "PipelineDraft": { "type": "object", "properties": { "graphDraftId": { "type": "string", "nullable": true }, "sourcePipelineRunId": { "type": "string", "nullable": true }, "latestPipelineRunId": { "type": "string", "nullable": true }, "latestRunExperimentName": { "type": "string", "nullable": true }, "latestRunExperimentId": { "type": "string", "nullable": true }, "isLatestRunExperimentArchived": { "type": "boolean", "nullable": true }, "status": { "$ref": "#/components/schemas/PipelineStatus" }, "graphDetail": { "$ref": "#/components/schemas/PipelineRunGraphDetail" }, "realTimeEndpointInfo": { "$ref": "#/components/schemas/RealTimeEndpointInfo" }, "linkedPipelinesInfo": { "type": "array", "items": { "$ref": "#/components/schemas/LinkedPipelineInfo" }, "nullable": true }, "nodesInDraft": { "type": "array", "items": { "type": "string" }, "nullable": true }, "studioMigrationInfo": { "$ref": "#/components/schemas/StudioMigrationInfo" }, "flattenedSubGraphs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/PipelineSubDraft" }, "nullable": true }, "pipelineRunSettingParameters": { "type": "array", "items": { "$ref": "#/components/schemas/RunSettingParameter" }, "nullable": true }, "pipelineRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/RunSettingParameterAssignment" }, "nullable": true }, "continueRunOnStepFailure": { "type": "boolean" }, "continueRunOnFailedOptionalInput": { "type": "boolean" }, "defaultCompute": { "$ref": "#/components/schemas/ComputeSetting" }, "defaultDatastore": { "$ref": "#/components/schemas/DatastoreSetting" }, "defaultCloudPriority": { "$ref": "#/components/schemas/CloudPrioritySetting" }, "enforceRerun": { "type": "boolean", "nullable": true }, "pipelineParameters": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "dataPathAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/LegacyDataPath" }, 
"description": "This is a dictionary", "nullable": true }, "dataSetDefinitionValueAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/DataSetDefinitionValue" }, "description": "This is a dictionary", "nullable": true }, "assetOutputSettingsAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AssetOutputSettings" }, "description": "This is a dictionary", "nullable": true }, "pipelineTimeout": { "type": "integer", "format": "int32" }, "identityConfig": { "$ref": "#/components/schemas/IdentitySetting" }, "graphComponentsMode": { "$ref": "#/components/schemas/GraphComponentsMode" }, "name": { "type": "string", "nullable": true }, "lastEditedBy": { "type": "string", "nullable": true }, "createdBy": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "pipelineType": { "$ref": "#/components/schemas/PipelineType" }, "pipelineDraftMode": { "$ref": "#/components/schemas/PipelineDraftMode" }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "entityStatus": { "$ref": "#/components/schemas/EntityStatus" }, "id": { "type": "string", "nullable": true }, "etag": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "PipelineDraftMode": { "enum": [ "None", "Normal", "Custom" ], "type": "string" }, "PipelineDraftStepDetails": { "type": "object", "properties": { "runId": { "type": "string", "nullable": true }, "target": { "type": "string", "nullable": true }, "status": { "$ref": "#/components/schemas/RunStatus" }, "statusDetail": { "type": "string", "nullable": true }, "parentRunId": { "type": "string", "nullable": true }, "startTime": { "type": "string", "format": "date-time", "nullable": true }, "endTime": { "type": "string", "format": "date-time", "nullable": true }, "isReused": { "type": "boolean", "nullable": true }, "reusedRunId": { "type": "string", "nullable": true }, "reusedPipelineRunId": { "type": "string", "nullable": true }, "logs": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "outputLog": { "type": "string", "nullable": true }, "runConfiguration": { "$ref": "#/components/schemas/RunConfiguration" }, "outputs": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "portOutputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/PortOutputInfo" }, "description": "This is a dictionary", "nullable": true }, "isExperimentArchived": { "type": "boolean", "nullable": true } }, "additionalProperties": false }, "PipelineDraftSummary": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "lastEditedBy": { "type": "string", "nullable": true }, "createdBy": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "pipelineType": { "$ref": "#/components/schemas/PipelineType" }, "pipelineDraftMode": { "$ref": "#/components/schemas/PipelineDraftMode" }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, 
"properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "entityStatus": { "$ref": "#/components/schemas/EntityStatus" }, "id": { "type": "string", "nullable": true }, "etag": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "PipelineEndpoint": { "type": "object", "properties": { "defaultVersion": { "type": "string", "nullable": true }, "defaultPipelineId": { "type": "string", "nullable": true }, "defaultGraphId": { "type": "string", "nullable": true }, "restEndpoint": { "type": "string", "nullable": true }, "publishedDate": { "type": "string", "format": "date-time" }, "publishedBy": { "type": "string", "nullable": true }, "parameters": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "dataSetDefinitionValueAssignment": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/DataSetDefinitionValue" }, "description": "This is a dictionary", "nullable": true }, "defaultPipelineName": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "updatedBy": { "type": "string", "nullable": true }, "swaggerUrl": { "type": "string", "nullable": true }, "lastRunTime": { "type": "string", "format": "date-time", "nullable": true }, "lastRunStatus": { "$ref": "#/components/schemas/PipelineRunStatusCode" }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "entityStatus": { "$ref": "#/components/schemas/EntityStatus" }, "id": { "type": "string", "nullable": true }, "etag": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "PipelineEndpointSummary": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "updatedBy": { "type": "string", "nullable": true }, "swaggerUrl": { "type": "string", "nullable": true }, "lastRunTime": { "type": "string", "format": "date-time", "nullable": true }, "lastRunStatus": { "$ref": "#/components/schemas/PipelineRunStatusCode" }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "entityStatus": { "$ref": "#/components/schemas/EntityStatus" }, "id": { "type": "string", "nullable": true }, "etag": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "PipelineGraph": { "type": "object", "properties": { "graphModuleDtos": { "type": "array", "items": { "$ref": "#/components/schemas/ModuleDto" }, "nullable": true }, "graphDataSources": { "type": "array", "items": { "$ref": "#/components/schemas/DataInfo" }, "nullable": true }, "graphs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/PipelineGraph" }, "description": "This is a dictionary", "nullable": true }, "graphDrafts": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/PipelineGraph" }, "description": "This is a dictionary", 
"nullable": true }, "moduleNodeRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNodeRunSetting" }, "nullable": true }, "moduleNodeUIInputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNodeUIInputSetting" }, "nullable": true }, "subPipelinesInfo": { "$ref": "#/components/schemas/SubPipelinesInfo" }, "referencedNodeId": { "type": "string", "nullable": true }, "pipelineRunSettingParameters": { "type": "array", "items": { "$ref": "#/components/schemas/RunSettingParameter" }, "nullable": true }, "pipelineRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/RunSettingParameterAssignment" }, "nullable": true }, "realTimeEndpointInfo": { "$ref": "#/components/schemas/RealTimeEndpointInfo" }, "nodeTelemetryMetaInfos": { "type": "array", "items": { "$ref": "#/components/schemas/NodeTelemetryMetaInfo" }, "nullable": true }, "graphComponentsMode": { "$ref": "#/components/schemas/GraphComponentsMode" }, "moduleNodes": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNode" }, "nullable": true }, "datasetNodes": { "type": "array", "items": { "$ref": "#/components/schemas/GraphDatasetNode" }, "nullable": true }, "subGraphNodes": { "type": "array", "items": { "$ref": "#/components/schemas/GraphReferenceNode" }, "nullable": true }, "controlReferenceNodes": { "type": "array", "items": { "$ref": "#/components/schemas/GraphControlReferenceNode" }, "nullable": true }, "controlNodes": { "type": "array", "items": { "$ref": "#/components/schemas/GraphControlNode" }, "nullable": true }, "edges": { "type": "array", "items": { "$ref": "#/components/schemas/GraphEdge" }, "nullable": true }, "entityInterface": { "$ref": "#/components/schemas/EntityInterface" }, "graphLayout": { "$ref": "#/components/schemas/GraphLayout" }, "createdBy": { "$ref": "#/components/schemas/CreatedBy" }, "lastUpdatedBy": { "$ref": "#/components/schemas/CreatedBy" }, "defaultCompute": { "$ref": "#/components/schemas/ComputeSetting" }, "defaultDatastore": { "$ref": "#/components/schemas/DatastoreSetting" }, "defaultCloudPriority": { "$ref": "#/components/schemas/CloudPrioritySetting" }, "extendedProperties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "parentSubGraphModuleIds": { "type": "array", "items": { "type": "string" }, "nullable": true }, "id": { "type": "string", "nullable": true }, "etag": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "PipelineInput": { "type": "object", "properties": { "data": { "$ref": "#/components/schemas/InputData" } }, "additionalProperties": false }, "PipelineJob": { "type": "object", "properties": { "jobType": { "$ref": "#/components/schemas/JobType" }, "pipelineJobType": { "$ref": "#/components/schemas/MfeInternalPipelineType" }, "pipeline": { "$ref": "#/components/schemas/Pipeline" }, "computeId": { "type": "string", "nullable": true }, "runId": { "type": "string", "nullable": true }, "settings": { "nullable": true }, "componentJobs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/MfeInternalV20211001ComponentJob" }, "description": "This is a dictionary", "nullable": true }, "inputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/JobInput" }, "description": "This is a dictionary", "nullable": true }, 
"outputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/JobOutput" }, "description": "This is a dictionary", "nullable": true }, "bindings": { "type": "array", "items": { "$ref": "#/components/schemas/Binding" }, "nullable": true }, "jobs": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary", "nullable": true }, "inputBindings": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/InputDataBinding" }, "description": "This is a dictionary", "nullable": true }, "outputBindings": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/OutputDataBinding" }, "description": "This is a dictionary", "nullable": true }, "sourceJobId": { "type": "string", "nullable": true }, "provisioningState": { "$ref": "#/components/schemas/JobProvisioningState" }, "parentJobName": { "type": "string", "nullable": true }, "displayName": { "type": "string", "nullable": true }, "experimentName": { "type": "string", "nullable": true }, "status": { "$ref": "#/components/schemas/JobStatus" }, "interactionEndpoints": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/JobEndpoint" }, "nullable": true }, "identity": { "$ref": "#/components/schemas/MfeInternalIdentityConfiguration" }, "compute": { "$ref": "#/components/schemas/ComputeConfiguration" }, "priority": { "type": "integer", "format": "int32", "nullable": true }, "output": { "$ref": "#/components/schemas/JobOutputArtifacts" }, "isArchived": { "type": "boolean" }, "schedule": { "$ref": "#/components/schemas/ScheduleBase" }, "componentId": { "type": "string", "nullable": true }, "notificationSetting": { "$ref": "#/components/schemas/NotificationSetting" }, "secretsConfiguration": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/MfeInternalSecretConfiguration" }, "nullable": true }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "PipelineJobRuntimeBasicSettings": { "type": "object", "properties": { "pipelineRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/RunSettingParameterAssignment" }, "nullable": true }, "experimentName": { "type": "string", "nullable": true }, "pipelineJobName": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "displayName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "triggerTimeString": { "type": "string", "nullable": true }, "pipelineParameters": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "dataPathAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/LegacyDataPath" }, "description": "This is a dictionary", "nullable": true }, "dataSetDefinitionValueAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/DataSetDefinitionValue" }, "description": "This is a dictionary", "nullable": true }, "assetOutputSettingsAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AssetOutputSettings" }, 
"description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "PipelineJobScheduleDto": { "type": "object", "properties": { "systemData": { "$ref": "#/components/schemas/SystemData" }, "name": { "type": "string", "nullable": true }, "pipelineJobName": { "type": "string", "nullable": true }, "pipelineJobRuntimeSettings": { "$ref": "#/components/schemas/PipelineJobRuntimeBasicSettings" }, "displayName": { "type": "string", "nullable": true }, "triggerType": { "$ref": "#/components/schemas/TriggerType" }, "recurrence": { "$ref": "#/components/schemas/Recurrence" }, "cron": { "$ref": "#/components/schemas/Cron" }, "status": { "$ref": "#/components/schemas/ScheduleStatus" }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "PipelineOutput": { "type": "object", "properties": { "data": { "$ref": "#/components/schemas/MfeInternalOutputData" } }, "additionalProperties": false }, "PipelineRun": { "type": "object", "properties": { "pipelineId": { "type": "string", "nullable": true }, "runSource": { "type": "string", "nullable": true }, "runType": { "$ref": "#/components/schemas/RunType" }, "parameters": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "dataPathAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/LegacyDataPath" }, "description": "This is a dictionary", "nullable": true }, "dataSetDefinitionValueAssignment": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/DataSetDefinitionValue" }, "description": "This is a dictionary", "nullable": true }, "assetOutputSettingsAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AssetOutputSettings" }, "description": "This is a dictionary", "nullable": true }, "totalSteps": { "type": "integer", "format": "int32" }, "logs": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "userAlias": { "type": "string", "nullable": true }, "enforceRerun": { "type": "boolean", "nullable": true }, "continueRunOnFailedOptionalInput": { "type": "boolean" }, "defaultCompute": { "$ref": "#/components/schemas/ComputeSetting" }, "defaultDatastore": { "$ref": "#/components/schemas/DatastoreSetting" }, "defaultCloudPriority": { "$ref": "#/components/schemas/CloudPrioritySetting" }, "pipelineTimeoutSeconds": { "type": "integer", "format": "int32", "nullable": true }, "continueRunOnStepFailure": { "type": "boolean" }, "identityConfig": { "$ref": "#/components/schemas/IdentitySetting" }, "description": { "type": "string", "nullable": true }, "displayName": { "type": "string", "nullable": true }, "runNumber": { "type": "integer", "format": "int32", "nullable": true }, "statusCode": { "$ref": "#/components/schemas/PipelineStatusCode" }, "runStatus": { "$ref": "#/components/schemas/RunStatus" }, "statusDetail": { "type": "string", "nullable": true }, "startTime": { "type": "string", "format": "date-time", "nullable": true }, "endTime": { "type": "string", "format": "date-time", "nullable": true }, "graphId": { "type": "string", "nullable": true }, "experimentId": { "type": "string", "nullable": 
true }, "experimentName": { "type": "string", "nullable": true }, "isExperimentArchived": { "type": "boolean", "nullable": true }, "submittedBy": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "stepTags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "aetherStartTime": { "type": "string", "format": "date-time", "nullable": true }, "aetherEndTime": { "type": "string", "format": "date-time", "nullable": true }, "runHistoryStartTime": { "type": "string", "format": "date-time", "nullable": true }, "runHistoryEndTime": { "type": "string", "format": "date-time", "nullable": true }, "uniqueChildRunComputeTargets": { "uniqueItems": true, "type": "array", "items": { "type": "string" }, "nullable": true }, "entityStatus": { "$ref": "#/components/schemas/EntityStatus" }, "id": { "type": "string", "nullable": true }, "etag": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "PipelineRunGraphDetail": { "type": "object", "properties": { "graph": { "$ref": "#/components/schemas/PipelineGraph" }, "graphNodesStatus": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/GraphNodeStatusInfo" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "PipelineRunGraphStatus": { "type": "object", "properties": { "status": { "$ref": "#/components/schemas/PipelineStatus" }, "graphNodesStatus": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/GraphNodeStatusInfo" }, "description": "This is a dictionary", "nullable": true }, "experimentId": { "type": "string", "nullable": true }, "isExperimentArchived": { "type": "boolean", "nullable": true } }, "additionalProperties": false }, "PipelineRunProfile": { "type": "object", "properties": { "runId": { "type": "string", "nullable": true }, "nodeId": { "type": "string", "nullable": true }, "runUrl": { "type": "string", "nullable": true }, "experimentName": { "type": "string", "nullable": true }, "experimentId": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "status": { "$ref": "#/components/schemas/PipelineRunStatus" }, "createTime": { "type": "integer", "format": "int64", "nullable": true }, "startTime": { "type": "integer", "format": "int64", "nullable": true }, "endTime": { "type": "integer", "format": "int64", "nullable": true }, "profilingTime": { "type": "integer", "format": "int64", "nullable": true }, "stepRunsProfile": { "type": "array", "items": { "$ref": "#/components/schemas/StepRunProfile" }, "nullable": true }, "subPipelineRunProfile": { "type": "array", "items": { "$ref": "#/components/schemas/PipelineRunProfile" }, "nullable": true } }, "additionalProperties": false }, "PipelineRunStatus": { "type": "object", "properties": { "statusCode": { "$ref": "#/components/schemas/PipelineRunStatusCode" }, "statusDetail": { "type": "string", "nullable": true }, "creationTime": { "type": "string", "format": "date-time" }, "endTime": { "type": "string", "format": "date-time", "nullable": true } }, "additionalProperties": false }, 
"PipelineRunStatusCode": { "enum": [ "NotStarted", "Running", "Failed", "Finished", "Canceled", "Queued", "CancelRequested" ], "type": "string" }, "PipelineRunStepDetails": { "type": "object", "properties": { "runId": { "type": "string", "nullable": true }, "target": { "type": "string", "nullable": true }, "status": { "$ref": "#/components/schemas/RunStatus" }, "statusDetail": { "type": "string", "nullable": true }, "parentRunId": { "type": "string", "nullable": true }, "startTime": { "type": "string", "format": "date-time", "nullable": true }, "endTime": { "type": "string", "format": "date-time", "nullable": true }, "isReused": { "type": "boolean", "nullable": true }, "logs": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "outputs": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "snapshotInfo": { "$ref": "#/components/schemas/SnapshotInfo" }, "inputDatasets": { "uniqueItems": true, "type": "array", "items": { "$ref": "#/components/schemas/DatasetLineage" }, "nullable": true }, "outputDatasets": { "uniqueItems": true, "type": "array", "items": { "$ref": "#/components/schemas/OutputDatasetLineage" }, "nullable": true } }, "additionalProperties": false }, "PipelineRunSummary": { "type": "object", "properties": { "description": { "type": "string", "nullable": true }, "displayName": { "type": "string", "nullable": true }, "runNumber": { "type": "integer", "format": "int32", "nullable": true }, "statusCode": { "$ref": "#/components/schemas/PipelineStatusCode" }, "runStatus": { "$ref": "#/components/schemas/RunStatus" }, "statusDetail": { "type": "string", "nullable": true }, "startTime": { "type": "string", "format": "date-time", "nullable": true }, "endTime": { "type": "string", "format": "date-time", "nullable": true }, "graphId": { "type": "string", "nullable": true }, "experimentId": { "type": "string", "nullable": true }, "experimentName": { "type": "string", "nullable": true }, "isExperimentArchived": { "type": "boolean", "nullable": true }, "submittedBy": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "stepTags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "aetherStartTime": { "type": "string", "format": "date-time", "nullable": true }, "aetherEndTime": { "type": "string", "format": "date-time", "nullable": true }, "runHistoryStartTime": { "type": "string", "format": "date-time", "nullable": true }, "runHistoryEndTime": { "type": "string", "format": "date-time", "nullable": true }, "uniqueChildRunComputeTargets": { "uniqueItems": true, "type": "array", "items": { "type": "string" }, "nullable": true }, "entityStatus": { "$ref": "#/components/schemas/EntityStatus" }, "id": { "type": "string", "nullable": true }, "etag": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "PipelineStatus": { "type": "object", "properties": { "statusCode": { "$ref": "#/components/schemas/PipelineStatusCode" }, "runStatus": { "$ref": "#/components/schemas/RunStatus" }, 
"statusDetail": { "type": "string", "nullable": true }, "startTime": { "type": "string", "format": "date-time", "nullable": true }, "endTime": { "type": "string", "format": "date-time", "nullable": true }, "isTerminalState": { "type": "boolean", "readOnly": true } }, "additionalProperties": false }, "PipelineStatusCode": { "enum": [ "NotStarted", "InDraft", "Preparing", "Running", "Failed", "Finished", "Canceled", "Throttled", "Unknown" ], "type": "string" }, "PipelineStepRun": { "type": "object", "properties": { "stepName": { "type": "string", "nullable": true }, "runNumber": { "type": "integer", "format": "int32", "nullable": true }, "runId": { "type": "string", "nullable": true }, "startTime": { "type": "string", "format": "date-time", "nullable": true }, "endTime": { "type": "string", "format": "date-time", "nullable": true }, "runStatus": { "$ref": "#/components/schemas/RunStatus" }, "computeTarget": { "type": "string", "nullable": true }, "computeType": { "type": "string", "nullable": true }, "runType": { "type": "string", "nullable": true }, "stepType": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "isReused": { "type": "boolean", "nullable": true }, "displayName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "PipelineStepRunOutputs": { "type": "object", "properties": { "outputs": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "portOutputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/PortOutputInfo" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "PipelineSubDraft": { "type": "object", "properties": { "parentGraphDraftId": { "type": "string", "nullable": true }, "parentNodeId": { "type": "string", "nullable": true }, "graphDetail": { "$ref": "#/components/schemas/PipelineRunGraphDetail" }, "moduleDto": { "$ref": "#/components/schemas/ModuleDto" }, "name": { "type": "string", "nullable": true }, "lastEditedBy": { "type": "string", "nullable": true }, "createdBy": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "pipelineType": { "$ref": "#/components/schemas/PipelineType" }, "pipelineDraftMode": { "$ref": "#/components/schemas/PipelineDraftMode" }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "entityStatus": { "$ref": "#/components/schemas/EntityStatus" }, "id": { "type": "string", "nullable": true }, "etag": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "PipelineType": { "enum": [ "TrainingPipeline", "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown" ], "type": "string" }, "PolicyValidationResponse": { "type": "object", "properties": { "errorResponse": { "$ref": "#/components/schemas/ErrorResponse" }, "nextActionIntervalInSeconds": { "type": "integer", "format": "int32", "nullable": true }, "actionType": { "$ref": "#/components/schemas/ActionType" } }, "additionalProperties": false }, "PortAction": { "enum": [ "Promote", "ViewInDataStore", 
"Visualize", "GetSchema", "CreateInferenceGraph", "RegisterModel", "PromoteAsTabular" ], "type": "string" }, "PortInfo": { "type": "object", "properties": { "nodeId": { "type": "string", "nullable": true }, "portName": { "type": "string", "nullable": true }, "graphPortName": { "type": "string", "nullable": true }, "isParameter": { "type": "boolean" }, "webServicePort": { "type": "string", "nullable": true } }, "additionalProperties": false }, "PortOutputInfo": { "type": "object", "properties": { "containerUri": { "type": "string", "format": "uri", "nullable": true }, "relativePath": { "type": "string", "nullable": true }, "previewParams": { "type": "string", "nullable": true }, "modelOutputPath": { "type": "string", "nullable": true }, "dataStoreName": { "type": "string", "nullable": true }, "dataReferenceType": { "$ref": "#/components/schemas/DataReferenceType" }, "isFile": { "type": "boolean" }, "supportedActions": { "type": "array", "items": { "$ref": "#/components/schemas/PortAction" }, "nullable": true } }, "additionalProperties": false }, "PrimaryMetrics": { "enum": [ "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError", "NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou" ], "type": "string" }, "PriorityConfig": { "type": "object", "properties": { "jobPriority": { "type": "integer", "format": "int32", "nullable": true }, "isPreemptible": { "type": "boolean", "nullable": true }, "nodeCountSet": { "type": "array", "items": { "type": "integer", "format": "int32" }, "nullable": true }, "scaleInterval": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "PriorityConfiguration": { "type": "object", "properties": { "cloudPriority": { "type": "integer", "format": "int32", "nullable": true }, "stringTypePriority": { "type": "string", "nullable": true } }, "additionalProperties": false }, "PromoteDataSetRequest": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "moduleNodeId": { "type": "string", "nullable": true }, "stepRunId": { "type": "string", "nullable": true }, "outputPortName": { "type": "string", "nullable": true }, "modelOutputPath": { "type": "string", "nullable": true }, "dataTypeId": { "type": "string", "nullable": true }, "datasetType": { "type": "string", "nullable": true }, "dataStoreName": { "type": "string", "nullable": true }, "outputRelativePath": { "type": "string", "nullable": true }, "pipelineRunId": { "type": "string", "nullable": true }, "rootPipelineRunId": { "type": "string", "nullable": true }, "experimentName": { "type": "string", "nullable": true }, "experimentId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ProviderEntity": { "type": "object", "properties": { "provider": { "type": "string", "nullable": true }, "module": { "type": "string", "nullable": true }, "connection_type": { "type": "array", "items": { "$ref": "#/components/schemas/ConnectionType" }, "nullable": true }, "apis": { "type": "array", "items": { "$ref": "#/components/schemas/ApiAndParameters" }, "nullable": true } }, "additionalProperties": false }, "ProvisioningState": { "enum": [ "Unknown", "Updating", "Creating", "Deleting", "Accepted", "Succeeded", "Failed", "Canceled" ], "type": "string" }, "PublishedPipeline": { "type": "object", "properties": { "totalRunSteps": { 
"type": "integer", "format": "int32" }, "totalRuns": { "type": "integer", "format": "int32" }, "parameters": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "dataSetDefinitionValueAssignment": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/DataSetDefinitionValue" }, "description": "This is a dictionary", "nullable": true }, "restEndpoint": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "graphId": { "type": "string", "nullable": true }, "publishedDate": { "type": "string", "format": "date-time" }, "lastRunTime": { "type": "string", "format": "date-time", "nullable": true }, "lastRunStatus": { "$ref": "#/components/schemas/PipelineRunStatusCode" }, "publishedBy": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "version": { "type": "string", "nullable": true }, "isDefault": { "type": "boolean", "nullable": true }, "entityStatus": { "$ref": "#/components/schemas/EntityStatus" }, "id": { "type": "string", "nullable": true }, "etag": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "PublishedPipelineSummary": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "graphId": { "type": "string", "nullable": true }, "publishedDate": { "type": "string", "format": "date-time" }, "lastRunTime": { "type": "string", "format": "date-time", "nullable": true }, "lastRunStatus": { "$ref": "#/components/schemas/PipelineRunStatusCode" }, "publishedBy": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "version": { "type": "string", "nullable": true }, "isDefault": { "type": "boolean", "nullable": true }, "entityStatus": { "$ref": "#/components/schemas/EntityStatus" }, "id": { "type": "string", "nullable": true }, "etag": { "type": "string", "nullable": true }, "createdDate": { "type": "string", "format": "date-time" }, "lastModifiedDate": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "PyTorchConfiguration": { "type": "object", "properties": { "communicationBackend": { "type": "string", "nullable": true }, "processCount": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "PythonInterfaceMapping": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "nameInYaml": { "type": "string", "nullable": true }, "argumentName": { "type": "string", "nullable": true }, "commandLineOption": { "type": "string", "nullable": true } }, "additionalProperties": false }, "PythonPyPiOrRCranLibraryDto": { "type": "object", "properties": { "package": { "type": "string", "nullable": true }, "repo": { "type": "string", "nullable": true } }, "additionalProperties": false }, 
"PythonSection": { "type": "object", "properties": { "interpreterPath": { "type": "string", "nullable": true }, "userManagedDependencies": { "type": "boolean" }, "condaDependencies": { "nullable": true }, "baseCondaEnvironment": { "type": "string", "nullable": true } }, "additionalProperties": false }, "QueueingInfo": { "type": "object", "properties": { "code": { "type": "string", "nullable": true }, "message": { "type": "string", "nullable": true }, "lastRefreshTimestamp": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "RCranPackage": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true }, "repository": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RGitHubPackage": { "type": "object", "properties": { "repository": { "type": "string", "nullable": true }, "authToken": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RSection": { "type": "object", "properties": { "rVersion": { "type": "string", "nullable": true }, "userManaged": { "type": "boolean" }, "rscriptPath": { "type": "string", "nullable": true }, "snapshotDate": { "type": "string", "nullable": true }, "cranPackages": { "type": "array", "items": { "$ref": "#/components/schemas/RCranPackage" }, "nullable": true }, "gitHubPackages": { "type": "array", "items": { "$ref": "#/components/schemas/RGitHubPackage" }, "nullable": true }, "customUrlPackages": { "type": "array", "items": { "type": "string" }, "nullable": true }, "bioConductorPackages": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "RawComponentDto": { "type": "object", "properties": { "componentSchema": { "type": "string", "nullable": true }, "isAnonymous": { "type": "boolean" }, "name": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true }, "type": { "$ref": "#/components/schemas/ComponentType" }, "componentTypeVersion": { "type": "string", "nullable": true }, "displayName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "isDeterministic": { "type": "boolean" }, "successfulReturnCode": { "type": "string", "nullable": true }, "inputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ComponentInput" }, "description": "This is a dictionary", "nullable": true }, "outputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ComponentOutput" }, "description": "This is a dictionary", "nullable": true }, "command": { "type": "string", "nullable": true }, "environmentName": { "type": "string", "nullable": true }, "environmentVersion": { "type": "string", "nullable": true }, "snapshotId": { "type": "string", "nullable": true }, "createdBy": { "$ref": "#/components/schemas/SchemaContractsCreatedBy" }, "lastModifiedBy": { "$ref": "#/components/schemas/SchemaContractsCreatedBy" }, "createdDate": { "type": "string", "format": "date-time", "nullable": true }, "lastModifiedDate": { "type": "string", "format": "date-time", "nullable": true }, "componentInternalId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RayConfiguration": { "type": 
"object", "properties": { "port": { "type": "integer", "format": "int32", "nullable": true }, "address": { "type": "string", "nullable": true }, "includeDashboard": { "type": "boolean", "nullable": true }, "dashboardPort": { "type": "integer", "format": "int32", "nullable": true }, "headNodeAdditionalArgs": { "type": "string", "nullable": true }, "workerNodeAdditionalArgs": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RealTimeEndpoint": { "type": "object", "properties": { "createdBy": { "type": "string", "nullable": true }, "kvTags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "state": { "$ref": "#/components/schemas/WebServiceState" }, "error": { "$ref": "#/components/schemas/ModelManagementErrorResponse" }, "computeType": { "$ref": "#/components/schemas/ComputeEnvironmentType" }, "imageId": { "type": "string", "nullable": true }, "cpu": { "type": "number", "format": "double", "nullable": true }, "memoryInGB": { "type": "number", "format": "double", "nullable": true }, "maxConcurrentRequestsPerContainer": { "type": "integer", "format": "int32", "nullable": true }, "numReplicas": { "type": "integer", "format": "int32", "nullable": true }, "eventHubEnabled": { "type": "boolean", "nullable": true }, "storageEnabled": { "type": "boolean", "nullable": true }, "appInsightsEnabled": { "type": "boolean", "nullable": true }, "autoScaleEnabled": { "type": "boolean", "nullable": true }, "minReplicas": { "type": "integer", "format": "int32", "nullable": true }, "maxReplicas": { "type": "integer", "format": "int32", "nullable": true }, "targetUtilization": { "type": "integer", "format": "int32", "nullable": true }, "refreshPeriodInSeconds": { "type": "integer", "format": "int32", "nullable": true }, "scoringUri": { "type": "string", "format": "uri", "nullable": true }, "deploymentStatus": { "$ref": "#/components/schemas/AKSReplicaStatus" }, "scoringTimeoutMs": { "type": "integer", "format": "int32", "nullable": true }, "authEnabled": { "type": "boolean", "nullable": true }, "aadAuthEnabled": { "type": "boolean", "nullable": true }, "region": { "type": "string", "nullable": true }, "primaryKey": { "type": "string", "nullable": true }, "secondaryKey": { "type": "string", "nullable": true }, "swaggerUri": { "type": "string", "format": "uri", "nullable": true }, "linkedPipelineDraftId": { "type": "string", "nullable": true }, "linkedPipelineRunId": { "type": "string", "nullable": true }, "warning": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "id": { "type": "string", "nullable": true }, "createdTime": { "type": "string", "format": "date-time", "nullable": true }, "updatedTime": { "type": "string", "format": "date-time", "nullable": true }, "computeName": { "type": "string", "nullable": true }, "updatedBy": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RealTimeEndpointInfo": { "type": "object", "properties": { "webServiceInputs": { "type": "array", "items": { "$ref": "#/components/schemas/WebServicePort" }, "nullable": true }, "webServiceOutputs": { "type": "array", "items": { "$ref": "#/components/schemas/WebServicePort" }, "nullable": true }, "deploymentsInfo": { "type": "array", "items": { "$ref": "#/components/schemas/DeploymentInfo" }, "nullable": true } }, "additionalProperties": false }, "RealTimeEndpointInternalStepCode": { "enum": [ "AboutToDeploy", "WaitAksComputeReady", 
"RegisterModels", "CreateServiceFromModels", "UpdateServiceFromModels", "WaitServiceCreating", "FetchServiceRelatedInfo", "TestWithSampleData", "AboutToDelete", "DeleteDeployment", "DeleteAsset", "DeleteImage", "DeleteModel", "DeleteServiceRecord" ], "type": "string" }, "RealTimeEndpointOpCode": { "enum": [ "Create", "Update", "Delete" ], "type": "string" }, "RealTimeEndpointOpStatusCode": { "enum": [ "Ongoing", "Succeeded", "Failed", "SucceededWithWarning" ], "type": "string" }, "RealTimeEndpointStatus": { "type": "object", "properties": { "lastOperation": { "$ref": "#/components/schemas/RealTimeEndpointOpCode" }, "lastOperationStatus": { "$ref": "#/components/schemas/RealTimeEndpointOpStatusCode" }, "internalStep": { "$ref": "#/components/schemas/RealTimeEndpointInternalStepCode" }, "statusDetail": { "type": "string", "nullable": true }, "deploymentState": { "type": "string", "nullable": true }, "serviceId": { "type": "string", "nullable": true }, "linkedPipelineDraftId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RealTimeEndpointSummary": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "id": { "type": "string", "nullable": true }, "createdTime": { "type": "string", "format": "date-time", "nullable": true }, "updatedTime": { "type": "string", "format": "date-time", "nullable": true }, "computeType": { "$ref": "#/components/schemas/ComputeEnvironmentType" }, "computeName": { "type": "string", "nullable": true }, "updatedBy": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RealTimeEndpointTestRequest": { "type": "object", "properties": { "endPoint": { "type": "string", "nullable": true }, "authKey": { "type": "string", "nullable": true }, "payload": { "type": "string", "nullable": true } }, "additionalProperties": false }, "Recurrence": { "type": "object", "properties": { "frequency": { "$ref": "#/components/schemas/Frequency" }, "interval": { "type": "integer", "format": "int32" }, "schedule": { "$ref": "#/components/schemas/RecurrenceSchedule" }, "endTime": { "type": "string", "nullable": true }, "startTime": { "type": "string", "nullable": true }, "timeZone": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RecurrenceFrequency": { "enum": [ "Minute", "Hour", "Day", "Week", "Month" ], "type": "string" }, "RecurrencePattern": { "type": "object", "properties": { "hours": { "type": "array", "items": { "type": "integer", "format": "int32" }, "nullable": true }, "minutes": { "type": "array", "items": { "type": "integer", "format": "int32" }, "nullable": true }, "weekdays": { "type": "array", "items": { "$ref": "#/components/schemas/Weekday" }, "nullable": true } }, "additionalProperties": false }, "RecurrenceSchedule": { "type": "object", "properties": { "hours": { "type": "array", "items": { "type": "integer", "format": "int32" }, "nullable": true }, "minutes": { "type": "array", "items": { "type": "integer", "format": "int32" }, "nullable": true }, "weekDays": { "type": "array", "items": { "$ref": "#/components/schemas/WeekDays" }, "nullable": true }, "monthDays": { "type": "array", "items": { "type": "integer", "format": "int32" }, "nullable": true } }, "additionalProperties": false }, "RegenerateServiceKeysRequest": { "type": "object", "properties": { "keyType": { "$ref": "#/components/schemas/KeyType" }, "keyValue": { "type": "string", "nullable": true } }, "additionalProperties": false }, 
"RegisterComponentMetaInfo": { "type": "object", "properties": { "amlModuleName": { "type": "string", "nullable": true }, "nameOnlyDisplayInfo": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true }, "moduleVersionId": { "type": "string", "nullable": true }, "snapshotId": { "type": "string", "nullable": true }, "componentRegistrationType": { "$ref": "#/components/schemas/ComponentRegistrationTypeEnum" }, "moduleEntityFromYaml": { "$ref": "#/components/schemas/ModuleEntity" }, "setAsDefaultVersion": { "type": "boolean" }, "dataTypesFromYaml": { "type": "array", "items": { "$ref": "#/components/schemas/DataTypeCreationInfo" }, "nullable": true }, "dataTypeMechanism": { "$ref": "#/components/schemas/DataTypeMechanism" }, "identifierHash": { "type": "string", "nullable": true }, "identifierHashes": { "type": "object", "properties": { "IdentifierHash": { "type": "string" }, "IdentifierHashV2": { "type": "string" } }, "additionalProperties": false, "nullable": true }, "contentHash": { "type": "string", "nullable": true }, "extraHash": { "type": "string", "nullable": true }, "extraHashes": { "type": "object", "properties": { "IdentifierHash": { "type": "string" }, "IdentifierHashV2": { "type": "string" } }, "additionalProperties": false, "nullable": true }, "registration": { "type": "boolean", "nullable": true }, "validateOnly": { "type": "boolean" }, "skipWorkspaceRelatedCheck": { "type": "boolean" }, "intellectualPropertyProtectedWorkspaceComponentRegistrationAllowedPublisher": { "type": "array", "items": { "type": "string" }, "nullable": true }, "systemManagedRegistration": { "type": "boolean" }, "allowDupNameBetweenInputAndOuputPort": { "type": "boolean" }, "moduleSource": { "type": "string", "nullable": true }, "moduleScope": { "type": "string", "nullable": true }, "moduleAdditionalIncludesCount": { "type": "integer", "format": "int32", "nullable": true }, "moduleOSType": { "type": "string", "nullable": true }, "moduleCodegenBy": { "type": "string", "nullable": true }, "moduleClientSource": { "type": "string", "nullable": true }, "moduleIsBuiltin": { "type": "boolean" }, "moduleRegisterEventExtensionFields": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true } }, "additionalProperties": false }, "RegisterRegistryComponentMetaInfo": { "type": "object", "properties": { "registryName": { "type": "string", "nullable": true }, "intellectualPropertyPublisherInformation": { "$ref": "#/components/schemas/IntellectualPropertyPublisherInformation" }, "blobReferenceData": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/RegistryBlobReferenceData" }, "description": "This is a dictionary", "nullable": true }, "amlModuleName": { "type": "string", "nullable": true }, "nameOnlyDisplayInfo": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true }, "moduleVersionId": { "type": "string", "nullable": true }, "snapshotId": { "type": "string", "nullable": true }, "componentRegistrationType": { "$ref": "#/components/schemas/ComponentRegistrationTypeEnum" }, "moduleEntityFromYaml": { "$ref": "#/components/schemas/ModuleEntity" }, "setAsDefaultVersion": { "type": "boolean" }, "dataTypesFromYaml": { "type": "array", "items": { "$ref": "#/components/schemas/DataTypeCreationInfo" }, "nullable": true }, "dataTypeMechanism": { "$ref": "#/components/schemas/DataTypeMechanism" }, 
"identifierHash": { "type": "string", "nullable": true }, "identifierHashes": { "type": "object", "properties": { "IdentifierHash": { "type": "string" }, "IdentifierHashV2": { "type": "string" } }, "additionalProperties": false, "nullable": true }, "contentHash": { "type": "string", "nullable": true }, "extraHash": { "type": "string", "nullable": true }, "extraHashes": { "type": "object", "properties": { "IdentifierHash": { "type": "string" }, "IdentifierHashV2": { "type": "string" } }, "additionalProperties": false, "nullable": true }, "registration": { "type": "boolean", "nullable": true }, "validateOnly": { "type": "boolean" }, "skipWorkspaceRelatedCheck": { "type": "boolean" }, "intellectualPropertyProtectedWorkspaceComponentRegistrationAllowedPublisher": { "type": "array", "items": { "type": "string" }, "nullable": true }, "systemManagedRegistration": { "type": "boolean" }, "allowDupNameBetweenInputAndOuputPort": { "type": "boolean" }, "moduleSource": { "type": "string", "nullable": true }, "moduleScope": { "type": "string", "nullable": true }, "moduleAdditionalIncludesCount": { "type": "integer", "format": "int32", "nullable": true }, "moduleOSType": { "type": "string", "nullable": true }, "moduleCodegenBy": { "type": "string", "nullable": true }, "moduleClientSource": { "type": "string", "nullable": true }, "moduleIsBuiltin": { "type": "boolean" }, "moduleRegisterEventExtensionFields": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true } }, "additionalProperties": false }, "RegisteredDataSetReference": { "type": "object", "properties": { "id": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RegistrationOptions": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "datasetRegistrationOptions": { "$ref": "#/components/schemas/DatasetRegistrationOptions" } }, "additionalProperties": false }, "RegistryBlobReferenceData": { "type": "object", "properties": { "dataReferenceId": { "type": "string", "nullable": true }, "data": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RegistryIdentity": { "type": "object", "properties": { "resourceId": { "type": "string", "nullable": true }, "clientId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "Relationship": { "type": "object", "properties": { "relationType": { "type": "string", "nullable": true }, "targetEntityId": { "type": "string", "nullable": true }, "assetId": { "type": "string", "nullable": true }, "entityType": { "type": "string", "nullable": true, "readOnly": true }, "direction": { "type": "string", "nullable": true }, "entityContainerId": { "type": "string", "nullable": true, "readOnly": true } } }, "RemoteDockerComputeInfo": { "type": "object", "properties": { "address": { "type": "string", "nullable": true }, "username": { "type": "string", "nullable": true }, "password": { "type": "string", "nullable": true }, "privateKey": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ResourceConfig": { "type": "object", 
"properties": { "gpuCount": { "type": "integer", "format": "int32", "nullable": true }, "cpuCount": { "type": "integer", "format": "int32", "nullable": true }, "memoryRequestInGB": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "ResourceConfiguration": { "type": "object", "properties": { "gpuCount": { "type": "integer", "format": "int32", "nullable": true }, "cpuCount": { "type": "integer", "format": "int32", "nullable": true }, "memoryRequestInGB": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "ResourcesSetting": { "type": "object", "properties": { "instanceSize": { "type": "string", "nullable": true }, "sparkVersion": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RetrieveToolFuncResultRequest": { "type": "object", "properties": { "func_path": { "type": "string", "nullable": true }, "func_kwargs": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary", "nullable": true }, "func_call_scenario": { "$ref": "#/components/schemas/ToolFuncCallScenario" } }, "additionalProperties": false }, "RetryConfiguration": { "type": "object", "properties": { "maxRetryCount": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "RootError": { "type": "object", "properties": { "code": { "type": "string", "description": "The service-defined error code. Supported error codes: ServiceError, UserError, ValidationError, AzureStorageError, TransientError, RequestThrottled.", "nullable": true }, "severity": { "type": "integer", "description": "The Severity of error", "format": "int32", "nullable": true }, "message": { "type": "string", "description": "A human-readable representation of the error.", "nullable": true }, "messageFormat": { "type": "string", "description": "An unformatted version of the message with no variable substitution.", "nullable": true }, "messageParameters": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "description": "Value substitutions corresponding to the contents of MessageFormat.", "nullable": true }, "referenceCode": { "type": "string", "description": "This code can optionally be set by the system generating the error.\r\nIt should be used to classify the problem and identify the module and code area where the failure occured.", "nullable": true }, "detailsUri": { "type": "string", "description": "A URI which points to more details about the context of the error.", "format": "uri", "nullable": true }, "target": { "type": "string", "description": "The target of the error (e.g., the name of the property in error).", "nullable": true }, "details": { "type": "array", "items": { "$ref": "#/components/schemas/RootError" }, "description": "The related errors that occurred during the request.", "nullable": true }, "innerError": { "$ref": "#/components/schemas/InnerErrorResponse" }, "additionalInfo": { "type": "array", "items": { "$ref": "#/components/schemas/ErrorAdditionalInfo" }, "description": "The error additional info.", "nullable": true } }, "additionalProperties": false, "description": "The root error." 
}, "RunAnnotations": { "type": "object", "properties": { "displayName": { "type": "string", "nullable": true }, "status": { "type": "string", "nullable": true }, "primaryMetricName": { "type": "string", "nullable": true }, "estimatedCost": { "type": "number", "format": "double", "nullable": true }, "primaryMetricSummary": { "$ref": "#/components/schemas/RunIndexMetricSummary" }, "metrics": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/RunIndexMetricSummarySystemObject" }, "nullable": true }, "parameters": { "type": "object", "additionalProperties": { "nullable": true }, "nullable": true }, "settings": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "modifiedTime": { "type": "string", "format": "date-time", "nullable": true }, "retainForLifetimeOfWorkspace": { "type": "boolean", "nullable": true }, "error": { "$ref": "#/components/schemas/IndexedErrorResponse" }, "resourceMetricSummary": { "$ref": "#/components/schemas/RunIndexResourceMetricSummary" }, "jobCost": { "$ref": "#/components/schemas/JobCost" }, "computeDuration": { "type": "string", "format": "date-span", "nullable": true }, "computeDurationMilliseconds": { "type": "number", "format": "double", "nullable": true }, "effectiveStartTimeUtc": { "type": "string", "format": "date-time", "nullable": true }, "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "archived": { "type": "boolean" }, "tags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true } } }, "RunConfiguration": { "type": "object", "properties": { "script": { "type": "string", "nullable": true }, "scriptType": { "$ref": "#/components/schemas/ScriptType" }, "command": { "type": "string", "nullable": true }, "useAbsolutePath": { "type": "boolean" }, "arguments": { "type": "array", "items": { "type": "string" }, "nullable": true }, "framework": { "$ref": "#/components/schemas/Framework" }, "communicator": { "$ref": "#/components/schemas/Communicator" }, "target": { "type": "string", "nullable": true }, "autoClusterComputeSpecification": { "$ref": "#/components/schemas/AutoClusterComputeSpecification" }, "dataReferences": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/DataReferenceConfiguration" }, "nullable": true }, "data": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/Data" }, "nullable": true }, "inputAssets": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/InputAsset" }, "nullable": true }, "outputData": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/OutputData" }, "nullable": true }, "datacaches": { "type": "array", "items": { "$ref": "#/components/schemas/DatacacheConfiguration" }, "nullable": true }, "jobName": { "type": "string", "nullable": true }, "maxRunDurationSeconds": { "type": "integer", "format": "int64", "nullable": true }, "nodeCount": { "type": "integer", "format": "int32", "nullable": true }, "maxNodeCount": { "type": "integer", "format": "int32", "nullable": true }, "instanceTypes": { "type": "array", "items": { "type": "string" }, "nullable": true }, "priority": { "type": "integer", "format": "int32", "nullable": true }, "credentialPassthrough": { "type": "boolean" }, "identity": { "$ref": "#/components/schemas/IdentityConfiguration" }, "environment": { "$ref": "#/components/schemas/EnvironmentDefinition" }, "history": { "$ref": 
"#/components/schemas/HistoryConfiguration" }, "spark": { "$ref": "#/components/schemas/SparkConfiguration" }, "parallelTask": { "$ref": "#/components/schemas/ParallelTaskConfiguration" }, "tensorflow": { "$ref": "#/components/schemas/TensorflowConfiguration" }, "mpi": { "$ref": "#/components/schemas/MpiConfiguration" }, "pyTorch": { "$ref": "#/components/schemas/PyTorchConfiguration" }, "ray": { "$ref": "#/components/schemas/RayConfiguration" }, "hdi": { "$ref": "#/components/schemas/HdiConfiguration" }, "docker": { "$ref": "#/components/schemas/DockerConfiguration" }, "commandReturnCodeConfig": { "$ref": "#/components/schemas/CommandReturnCodeConfig" }, "environmentVariables": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "applicationEndpoints": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ApplicationEndpointConfiguration" }, "nullable": true }, "parameters": { "type": "array", "items": { "$ref": "#/components/schemas/ParameterDefinition" }, "nullable": true }, "autologgerSettings": { "$ref": "#/components/schemas/AutologgerSettings" }, "dataBricks": { "$ref": "#/components/schemas/DatabricksConfiguration" }, "trainingDiagnosticConfig": { "$ref": "#/components/schemas/TrainingDiagnosticConfiguration" }, "secretsConfiguration": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/SecretConfiguration" }, "nullable": true } }, "additionalProperties": false }, "RunDatasetReference": { "type": "object", "properties": { "id": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RunDefinition": { "type": "object", "properties": { "configuration": { "$ref": "#/components/schemas/RunConfiguration" }, "snapshotId": { "type": "string", "format": "uuid", "nullable": true }, "snapshots": { "type": "array", "items": { "$ref": "#/components/schemas/Snapshot" }, "nullable": true }, "parentRunId": { "type": "string", "nullable": true }, "runType": { "type": "string", "nullable": true }, "displayName": { "type": "string", "nullable": true }, "environmentAssetId": { "type": "string", "nullable": true }, "primaryMetricName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "cancelReason": { "type": "string", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true } }, "additionalProperties": false }, "RunDetailsDto": { "type": "object", "properties": { "runId": { "type": "string", "nullable": true }, "runUuid": { "type": "string", "format": "uuid", "nullable": true }, "parentRunUuid": { "type": "string", "format": "uuid", "nullable": true }, "rootRunUuid": { "type": "string", "format": "uuid", "nullable": true }, "target": { "type": "string", "nullable": true }, "status": { "type": "string", "nullable": true }, "parentRunId": { "type": "string", "nullable": true }, "dataContainerId": { "type": "string", "nullable": true }, "createdTimeUtc": { "type": "string", "format": "date-time", "nullable": true }, "startTimeUtc": { "type": "string", "format": "date-time", "nullable": true }, "endTimeUtc": { "type": "string", "format": "date-time", "nullable": true }, "error": { "$ref": "#/components/schemas/ErrorResponse" }, "warnings": { "type": "array", 
"items": { "$ref": "#/components/schemas/RunDetailsWarningDto" }, "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "parameters": { "type": "object", "additionalProperties": { "nullable": true }, "nullable": true }, "services": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/EndpointSetting" }, "description": "This is a dictionary", "nullable": true }, "inputDatasets": { "uniqueItems": true, "type": "array", "items": { "$ref": "#/components/schemas/DatasetLineage" }, "nullable": true }, "outputDatasets": { "uniqueItems": true, "type": "array", "items": { "$ref": "#/components/schemas/OutputDatasetLineage" }, "nullable": true }, "runDefinition": { "nullable": true }, "logFiles": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "jobCost": { "$ref": "#/components/schemas/JobCost" }, "revision": { "type": "integer", "format": "int64", "nullable": true }, "runTypeV2": { "$ref": "#/components/schemas/RunTypeV2" }, "settings": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "computeRequest": { "$ref": "#/components/schemas/ComputeRequest" }, "compute": { "$ref": "#/components/schemas/Compute" }, "createdBy": { "$ref": "#/components/schemas/User" }, "computeDuration": { "type": "string", "format": "date-span", "nullable": true }, "effectiveStartTimeUtc": { "type": "string", "format": "date-time", "nullable": true }, "runNumber": { "type": "integer", "format": "int32", "nullable": true }, "rootRunId": { "type": "string", "nullable": true }, "experimentId": { "type": "string", "nullable": true }, "userId": { "type": "string", "nullable": true }, "statusRevision": { "type": "integer", "format": "int64", "nullable": true }, "currentComputeTime": { "type": "string", "format": "date-span", "nullable": true }, "lastStartTimeUtc": { "type": "string", "format": "date-time", "nullable": true }, "lastModifiedBy": { "$ref": "#/components/schemas/User" }, "lastModifiedUtc": { "type": "string", "format": "date-time", "nullable": true }, "duration": { "type": "string", "format": "date-span", "nullable": true }, "inputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/TypedAssetReference" }, "nullable": true }, "outputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/TypedAssetReference" }, "nullable": true }, "currentAttemptId": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "RunDetailsWarningDto": { "type": "object", "properties": { "source": { "type": "string", "nullable": true }, "message": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RunDisplayNameGenerationType": { "enum": [ "AutoAppend", "UserProvidedMacro" ], "type": "string" }, "RunDto": { "type": "object", "properties": { "runNumber": { "type": "integer", "format": "int32", "nullable": true }, "rootRunId": { "type": "string", "nullable": true }, "createdUtc": { "type": "string", "format": "date-time", "nullable": true }, "createdBy": { "$ref": "#/components/schemas/User" }, "userId": { "type": "string", "nullable": true }, "token": { "type": "string", "nullable": true }, "tokenExpiryTimeUtc": { "type": 
"string", "format": "date-time", "nullable": true }, "error": { "$ref": "#/components/schemas/ErrorResponse" }, "warnings": { "type": "array", "items": { "$ref": "#/components/schemas/RunDetailsWarningDto" }, "nullable": true }, "revision": { "type": "integer", "format": "int64", "nullable": true }, "statusRevision": { "type": "integer", "format": "int64", "nullable": true }, "runUuid": { "type": "string", "format": "uuid", "nullable": true }, "parentRunUuid": { "type": "string", "format": "uuid", "nullable": true }, "rootRunUuid": { "type": "string", "format": "uuid", "nullable": true }, "lastStartTimeUtc": { "type": "string", "format": "date-time", "nullable": true }, "currentComputeTime": { "type": "string", "format": "date-span", "nullable": true }, "computeDuration": { "type": "string", "format": "date-span", "nullable": true }, "effectiveStartTimeUtc": { "type": "string", "format": "date-time", "nullable": true }, "lastModifiedBy": { "$ref": "#/components/schemas/User" }, "lastModifiedUtc": { "type": "string", "format": "date-time", "nullable": true }, "duration": { "type": "string", "format": "date-span", "nullable": true }, "cancelationReason": { "type": "string", "nullable": true }, "currentAttemptId": { "type": "integer", "format": "int32", "nullable": true }, "runId": { "type": "string", "nullable": true }, "parentRunId": { "type": "string", "nullable": true }, "experimentId": { "type": "string", "nullable": true }, "status": { "type": "string", "nullable": true }, "startTimeUtc": { "type": "string", "format": "date-time", "nullable": true }, "endTimeUtc": { "type": "string", "format": "date-time", "nullable": true }, "scheduleId": { "type": "string", "nullable": true }, "displayName": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "dataContainerId": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "hidden": { "type": "boolean", "nullable": true }, "runType": { "type": "string", "nullable": true }, "runTypeV2": { "$ref": "#/components/schemas/RunTypeV2" }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "parameters": { "type": "object", "additionalProperties": { "nullable": true }, "nullable": true }, "actionUris": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "scriptName": { "type": "string", "nullable": true }, "target": { "type": "string", "nullable": true }, "uniqueChildRunComputeTargets": { "uniqueItems": true, "type": "array", "items": { "type": "string" }, "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "settings": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "services": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/EndpointSetting" }, "nullable": true }, "inputDatasets": { "uniqueItems": true, "type": "array", "items": { "$ref": "#/components/schemas/DatasetLineage" }, "nullable": true }, "outputDatasets": { "uniqueItems": true, "type": "array", "items": { "$ref": "#/components/schemas/OutputDatasetLineage" }, "nullable": true }, "runDefinition": { "nullable": true }, "jobSpecification": { "nullable": true }, "primaryMetricName": { "type": "string", "nullable": true }, "createdFrom": { "$ref": "#/components/schemas/CreatedFromDto" }, "cancelUri": { 
"type": "string", "nullable": true }, "completeUri": { "type": "string", "nullable": true }, "diagnosticsUri": { "type": "string", "nullable": true }, "computeRequest": { "$ref": "#/components/schemas/ComputeRequest" }, "compute": { "$ref": "#/components/schemas/Compute" }, "retainForLifetimeOfWorkspace": { "type": "boolean", "nullable": true }, "queueingInfo": { "$ref": "#/components/schemas/QueueingInfo" }, "inputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/TypedAssetReference" }, "nullable": true }, "outputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/TypedAssetReference" }, "nullable": true } }, "additionalProperties": false }, "RunIndexEntity": { "type": "object", "properties": { "schemaId": { "type": "string", "nullable": true }, "entityId": { "type": "string", "nullable": true }, "kind": { "$ref": "#/components/schemas/EntityKind" }, "annotations": { "$ref": "#/components/schemas/RunAnnotations" }, "properties": { "$ref": "#/components/schemas/RunProperties" }, "internal": { "$ref": "#/components/schemas/ExtensibleObject" }, "updateSequence": { "type": "integer", "format": "int64" }, "type": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true, "readOnly": true }, "entityContainerId": { "type": "string", "nullable": true, "readOnly": true }, "entityObjectId": { "type": "string", "nullable": true, "readOnly": true }, "resourceType": { "type": "string", "nullable": true, "readOnly": true }, "relationships": { "type": "array", "items": { "$ref": "#/components/schemas/Relationship" }, "nullable": true }, "assetId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RunIndexMetricSummary": { "type": "object", "properties": { "count": { "type": "integer", "format": "int64" }, "lastValue": { "nullable": true }, "minimumValue": { "nullable": true }, "maximumValue": { "nullable": true }, "metricType": { "type": "string", "nullable": true } } }, "RunIndexMetricSummarySystemObject": { "type": "object", "properties": { "count": { "type": "integer", "format": "int64" }, "lastValue": { "nullable": true }, "minimumValue": { "nullable": true }, "maximumValue": { "nullable": true }, "metricType": { "type": "string", "nullable": true } } }, "RunIndexResourceMetricSummary": { "type": "object", "properties": { "gpuUtilizationPercentLastHour": { "type": "number", "format": "double", "nullable": true }, "gpuMemoryUtilizationPercentLastHour": { "type": "number", "format": "double", "nullable": true }, "gpuEnergyJoules": { "type": "number", "format": "double", "nullable": true }, "resourceMetricNames": { "type": "array", "items": { "type": "string" }, "nullable": true } } }, "RunMetricDto": { "type": "object", "properties": { "runId": { "type": "string", "nullable": true }, "metricId": { "type": "string", "format": "uuid" }, "dataContainerId": { "type": "string", "nullable": true }, "metricType": { "type": "string", "nullable": true }, "createdUtc": { "type": "string", "format": "date-time", "nullable": true }, "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "label": { "type": "string", "nullable": true }, "numCells": { "type": "integer", "format": "int32" }, "dataLocation": { "type": "string", "nullable": true }, "cells": { "type": "array", "items": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary" }, "nullable": true }, "schema": { "$ref": "#/components/schemas/MetricSchemaDto" } }, 
"additionalProperties": false }, "RunMetricsTypesDto": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "type": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RunProperties": { "type": "object", "properties": { "dataContainerId": { "type": "string", "nullable": true }, "targetName": { "type": "string", "nullable": true }, "runName": { "type": "string", "nullable": true }, "experimentName": { "type": "string", "nullable": true }, "runId": { "type": "string", "nullable": true }, "parentRunId": { "type": "string", "nullable": true }, "rootRunId": { "type": "string", "nullable": true }, "runType": { "type": "string", "nullable": true }, "runTypeV2": { "$ref": "#/components/schemas/RunTypeV2Index" }, "scriptName": { "type": "string", "nullable": true }, "experimentId": { "type": "string", "nullable": true }, "runUuid": { "type": "string", "format": "uuid", "nullable": true }, "parentRunUuid": { "type": "string", "format": "uuid", "nullable": true }, "runNumber": { "type": "integer", "format": "int32" }, "startTime": { "type": "string", "format": "date-time", "nullable": true }, "endTime": { "type": "string", "format": "date-time", "nullable": true }, "computeRequest": { "$ref": "#/components/schemas/ComputeRequest" }, "compute": { "$ref": "#/components/schemas/Compute" }, "userProperties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "actionUris": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "duration": { "type": "string", "format": "date-span", "nullable": true }, "durationMilliseconds": { "type": "number", "format": "double", "nullable": true }, "creationContext": { "$ref": "#/components/schemas/CreationContext" } } }, "RunSettingParameter": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "label": { "type": "string", "nullable": true }, "parameterType": { "$ref": "#/components/schemas/RunSettingParameterType" }, "isOptional": { "type": "boolean", "nullable": true }, "defaultValue": { "type": "string", "nullable": true }, "lowerBound": { "type": "string", "nullable": true }, "upperBound": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "runSettingUIHint": { "$ref": "#/components/schemas/RunSettingUIParameterHint" }, "argumentName": { "type": "string", "nullable": true }, "sectionName": { "type": "string", "nullable": true }, "sectionDescription": { "type": "string", "nullable": true }, "sectionArgumentName": { "type": "string", "nullable": true }, "examples": { "type": "array", "items": { "type": "string" }, "nullable": true }, "enumValues": { "type": "array", "items": { "type": "string" }, "nullable": true }, "enumValuesToArgumentStrings": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "enabledByParameterName": { "type": "string", "nullable": true }, "enabledByParameterValues": { "type": "array", "items": { "type": "string" }, "nullable": true }, "disabledByParameters": { "type": "array", "items": { "type": "string" }, "nullable": true }, "moduleRunSettingType": { "$ref": "#/components/schemas/ModuleRunSettingTypes" }, "linkedParameterDefaultValueMapping": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "linkedParameterKeyName": { "type": "string", "nullable": 
true }, "supportLinkSetting": { "type": "boolean" } }, "additionalProperties": false }, "RunSettingParameterAssignment": { "type": "object", "properties": { "useGraphDefaultCompute": { "type": "boolean", "nullable": true }, "mlcComputeType": { "type": "string", "nullable": true }, "computeRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/RunSettingParameterAssignment" }, "nullable": true }, "linkedParameterName": { "type": "string", "nullable": true }, "valueType": { "$ref": "#/components/schemas/ParameterValueType" }, "assignmentsToConcatenate": { "type": "array", "items": { "$ref": "#/components/schemas/ParameterAssignment" }, "nullable": true }, "dataPathAssignment": { "$ref": "#/components/schemas/LegacyDataPath" }, "dataSetDefinitionValueAssignment": { "$ref": "#/components/schemas/DataSetDefinitionValue" }, "name": { "type": "string", "nullable": true }, "value": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RunSettingParameterType": { "enum": [ "Undefined", "Int", "Double", "Bool", "String", "JsonString", "YamlString", "StringList" ], "type": "string" }, "RunSettingUIParameterHint": { "type": "object", "properties": { "uiWidgetType": { "$ref": "#/components/schemas/RunSettingUIWidgetTypeEnum" }, "jsonEditor": { "$ref": "#/components/schemas/UIJsonEditor" }, "yamlEditor": { "$ref": "#/components/schemas/UIYamlEditor" }, "computeSelection": { "$ref": "#/components/schemas/UIComputeSelection" }, "hyperparameterConfiguration": { "$ref": "#/components/schemas/UIHyperparameterConfiguration" }, "uxIgnore": { "type": "boolean" }, "anonymous": { "type": "boolean" }, "supportReset": { "type": "boolean" } }, "additionalProperties": false }, "RunSettingUIWidgetTypeEnum": { "enum": [ "Default", "ComputeSelection", "JsonEditor", "Mode", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "Checkbox", "MultipleSelection", "HyperparameterConfiguration", "JsonTextBox", "Connection", "Static" ], "type": "string" }, "RunStatus": { "enum": [ "NotStarted", "Unapproved", "Pausing", "Paused", "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled" ], "type": "string" }, "RunStatusPeriod": { "type": "object", "properties": { "status": { "$ref": "#/components/schemas/RunStatus" }, "subPeriods": { "type": "array", "items": { "$ref": "#/components/schemas/SubStatusPeriod" }, "nullable": true }, "start": { "type": "integer", "format": "int64", "nullable": true }, "end": { "type": "integer", "format": "int64", "nullable": true } }, "additionalProperties": false }, "RunType": { "enum": [ "HTTP", "SDK", "Schedule", "Portal" ], "type": "string" }, "RunTypeV2": { "type": "object", "properties": { "orchestrator": { "type": "string", "nullable": true }, "traits": { "uniqueItems": true, "type": "array", "items": { "type": "string" }, "nullable": true }, "attribution": { "type": "string", "nullable": true }, "computeType": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RunTypeV2Index": { "type": "object", "properties": { "orchestrator": { "type": "string", "nullable": true }, "traits": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "attribution": { "type": "string", "nullable": true }, "computeType": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RuntimeConfiguration": { "type": "object", "properties": { "baseImage": { "type": "string", "nullable": 
true }, "version": { "type": "string", "nullable": true } }, "additionalProperties": false }, "RuntimeStatusEnum": { "enum": [ "Unavailable", "Failed", "NotExist", "Starting", "Stopping" ], "type": "string" }, "RuntimeType": { "enum": [ "ManagedOnlineEndpoint", "ComputeInstance", "TrainingSession" ], "type": "string" }, "SampleMeta": { "type": "object", "properties": { "image": { "type": "string", "nullable": true }, "id": { "type": "string", "nullable": true }, "displayName": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "docLink": { "type": "string", "nullable": true }, "tags": { "type": "array", "items": { "type": "string" }, "nullable": true }, "createdAt": { "type": "string", "format": "date-time" }, "updatedAt": { "type": "string", "format": "date-time" }, "feedName": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true } }, "additionalProperties": false }, "SamplingAlgorithmType": { "enum": [ "Random", "Grid", "Bayesian" ], "type": "string" }, "SavePipelineDraftRequest": { "type": "object", "properties": { "uiWidgetMetaInfos": { "type": "array", "items": { "$ref": "#/components/schemas/UIWidgetMetaInfo" }, "nullable": true }, "webServiceInputs": { "type": "array", "items": { "$ref": "#/components/schemas/WebServicePort" }, "nullable": true }, "webServiceOutputs": { "type": "array", "items": { "$ref": "#/components/schemas/WebServicePort" }, "nullable": true }, "nodesInDraft": { "type": "array", "items": { "type": "string" }, "nullable": true }, "name": { "type": "string", "nullable": true }, "pipelineType": { "$ref": "#/components/schemas/PipelineType" }, "pipelineDraftMode": { "$ref": "#/components/schemas/PipelineDraftMode" }, "graphComponentsMode": { "$ref": "#/components/schemas/GraphComponentsMode" }, "subPipelinesInfo": { "$ref": "#/components/schemas/SubPipelinesInfo" }, "flattenedSubGraphs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/PipelineSubDraft" }, "nullable": true }, "pipelineParameters": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "dataPathAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/LegacyDataPath" }, "description": "This is a dictionary", "nullable": true }, "dataSetDefinitionValueAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/DataSetDefinitionValue" }, "description": "This is a dictionary", "nullable": true }, "assetOutputSettingsAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AssetOutputSettings" }, "description": "This is a dictionary", "nullable": true }, "graph": { "$ref": "#/components/schemas/GraphDraftEntity" }, "pipelineRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/RunSettingParameterAssignment" }, "nullable": true }, "moduleNodeRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNodeRunSetting" }, "nullable": true }, "moduleNodeUIInputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNodeUIInputSetting" }, "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "continueRunOnStepFailure": { "type": "boolean", "nullable": true }, "description": { "type": "string", "nullable": true }, "properties": { 
"type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "enforceRerun": { "type": "boolean", "nullable": true }, "datasetAccessModes": { "$ref": "#/components/schemas/DatasetAccessModes" } }, "additionalProperties": false }, "SavedDataSetReference": { "type": "object", "properties": { "id": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ScheduleBase": { "type": "object", "properties": { "scheduleStatus": { "$ref": "#/components/schemas/MfeInternalScheduleStatus" }, "scheduleType": { "$ref": "#/components/schemas/ScheduleType" }, "endTime": { "type": "string", "format": "date-time", "nullable": true }, "startTime": { "type": "string", "format": "date-time", "nullable": true }, "timeZone": { "type": "string", "nullable": true }, "expression": { "type": "string", "nullable": true }, "frequency": { "$ref": "#/components/schemas/RecurrenceFrequency" }, "interval": { "type": "integer", "format": "int32" }, "pattern": { "$ref": "#/components/schemas/RecurrencePattern" } }, "additionalProperties": false }, "ScheduleProvisioningStatus": { "enum": [ "Creating", "Updating", "Deleting", "Succeeded", "Failed", "Canceled" ], "type": "string" }, "ScheduleStatus": { "enum": [ "Enabled", "Disabled" ], "type": "string" }, "ScheduleType": { "enum": [ "Cron", "Recurrence" ], "type": "string" }, "SchemaContractsCreatedBy": { "type": "object", "properties": { "userObjectId": { "type": "string", "nullable": true }, "userTenantId": { "type": "string", "nullable": true }, "userName": { "type": "string", "nullable": true }, "userPrincipalName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ScopeCloudConfiguration": { "type": "object", "properties": { "inputPathSuffixes": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ArgumentAssignment" }, "description": "This is a dictionary", "nullable": true }, "outputPathSuffixes": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ArgumentAssignment" }, "description": "This is a dictionary", "nullable": true }, "userAlias": { "type": "string", "nullable": true }, "tokens": { "type": "integer", "format": "int32", "nullable": true }, "autoToken": { "type": "integer", "format": "int32", "nullable": true }, "vcp": { "type": "number", "format": "float", "nullable": true } }, "additionalProperties": false }, "ScopeType": { "enum": [ "Global", "Tenant", "Subscription", "ResourceGroup", "Workspace" ], "type": "string" }, "ScriptType": { "enum": [ "Python", "Notebook" ], "type": "string" }, "Seasonality": { "type": "object", "properties": { "mode": { "$ref": "#/components/schemas/SeasonalityMode" }, "value": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "SeasonalityMode": { "enum": [ "Auto", "Custom" ], "type": "string" }, "SecretConfiguration": { "type": "object", "properties": { "workspace_secret_name": { "type": "string", "nullable": true }, "uri": { "type": "string", "nullable": true } }, "additionalProperties": false }, "Section": { "enum": [ "Gallery", "Template" ], "type": "string" }, "SegmentedResult`1": { "type": "object", "properties": { "value": { "type": "array", "items": { "$ref": "#/components/schemas/FlowIndexEntity" }, "nullable": true }, "continuationToken": { "type": "string", "nullable": true }, "count": { "type": "integer", "format": "int32", "nullable": true }, "nextLink": { "type": "string", "nullable": true } }, "additionalProperties": false }, 
"ServiceLogRequest": { "type": "object", "properties": { "logLevel": { "$ref": "#/components/schemas/LogLevel" }, "message": { "type": "string", "nullable": true }, "timestamp": { "type": "string", "format": "date-time", "nullable": true } }, "additionalProperties": false }, "SessionApplication": { "type": "object", "properties": { "image": { "type": "string", "nullable": true }, "envVars": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "pythonPipRequirements": { "type": "array", "items": { "type": "string" }, "nullable": true }, "setupResults": { "type": "array", "items": { "$ref": "#/components/schemas/SessionApplicationRunCommandResult" }, "nullable": true } }, "additionalProperties": false }, "SessionApplicationRunCommandResult": { "type": "object", "properties": { "command": { "type": "string", "nullable": true }, "arguments": { "type": "array", "items": { "type": "string" }, "nullable": true }, "exitCode": { "type": "integer", "format": "int32" }, "stdOut": { "type": "string", "nullable": true }, "stdErr": { "type": "string", "nullable": true } }, "additionalProperties": false }, "SessionProperties": { "type": "object", "properties": { "sessionId": { "type": "string", "nullable": true }, "subscriptionId": { "type": "string", "nullable": true }, "resourceGroupName": { "type": "string", "nullable": true }, "workspaceName": { "type": "string", "nullable": true }, "userObjectId": { "type": "string", "nullable": true }, "userTenantId": { "type": "string", "nullable": true }, "vmSize": { "type": "string", "nullable": true }, "maxIdleTimeSeconds": { "type": "integer", "format": "int64" }, "application": { "$ref": "#/components/schemas/SessionApplication" }, "lastAliveTime": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "SessionSetupModeEnum": { "enum": [ "ClientWait", "SystemWait" ], "type": "string" }, "SetupFlowSessionAction": { "enum": [ "Install", "Reset", "Update", "Delete" ], "type": "string" }, "SetupFlowSessionRequest": { "type": "object", "properties": { "action": { "$ref": "#/components/schemas/SetupFlowSessionAction" }, "vmSize": { "type": "string", "nullable": true }, "maxIdleTimeSeconds": { "type": "integer", "format": "int64", "nullable": true }, "identity": { "type": "string", "nullable": true } }, "additionalProperties": false }, "SeverityLevel": { "enum": [ "Critical", "Error", "Warning", "Info" ], "type": "string" }, "SharingScope": { "type": "object", "properties": { "type": { "$ref": "#/components/schemas/ScopeType" }, "identifier": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ShortSeriesHandlingConfiguration": { "enum": [ "Auto", "Pad", "Drop" ], "type": "string" }, "Snapshot": { "type": "object", "properties": { "id": { "type": "string", "format": "uuid", "nullable": true }, "directoryName": { "type": "string", "nullable": true }, "snapshotAssetId": { "type": "string", "nullable": true }, "snapshotEntityId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "SnapshotInfo": { "type": "object", "properties": { "rootDownloadUrl": { "type": "string", "nullable": true }, "snapshots": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/DownloadResourceInfo" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "SourceCodeDataReference": { "type": "object", "properties": { "dataStoreName": { "type": "string", "nullable": true }, "path": { "type": "string", 
"nullable": true } }, "additionalProperties": false }, "SparkConfiguration": { "type": "object", "properties": { "configuration": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "files": { "type": "array", "items": { "type": "string" }, "nullable": true }, "archives": { "type": "array", "items": { "type": "string" }, "nullable": true }, "jars": { "type": "array", "items": { "type": "string" }, "nullable": true }, "pyFiles": { "type": "array", "items": { "type": "string" }, "nullable": true }, "sparkPoolResourceId": { "type": "string", "nullable": true } }, "additionalProperties": false }, "SparkJarTaskDto": { "type": "object", "properties": { "main_class_name": { "type": "string", "nullable": true }, "parameters": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "SparkJob": { "type": "object", "properties": { "jobType": { "$ref": "#/components/schemas/JobType" }, "resources": { "$ref": "#/components/schemas/SparkResourceConfiguration" }, "args": { "type": "string", "nullable": true }, "codeId": { "type": "string", "nullable": true }, "entry": { "$ref": "#/components/schemas/SparkJobEntry" }, "pyFiles": { "type": "array", "items": { "type": "string" }, "nullable": true }, "jars": { "type": "array", "items": { "type": "string" }, "nullable": true }, "files": { "type": "array", "items": { "type": "string" }, "nullable": true }, "archives": { "type": "array", "items": { "type": "string" }, "nullable": true }, "environmentId": { "type": "string", "nullable": true }, "inputDataBindings": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/InputDataBinding" }, "nullable": true }, "outputDataBindings": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/OutputDataBinding" }, "nullable": true }, "conf": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "environmentVariables": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "provisioningState": { "$ref": "#/components/schemas/JobProvisioningState" }, "parentJobName": { "type": "string", "nullable": true }, "displayName": { "type": "string", "nullable": true }, "experimentName": { "type": "string", "nullable": true }, "status": { "$ref": "#/components/schemas/JobStatus" }, "interactionEndpoints": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/JobEndpoint" }, "nullable": true }, "identity": { "$ref": "#/components/schemas/MfeInternalIdentityConfiguration" }, "compute": { "$ref": "#/components/schemas/ComputeConfiguration" }, "priority": { "type": "integer", "format": "int32", "nullable": true }, "output": { "$ref": "#/components/schemas/JobOutputArtifacts" }, "isArchived": { "type": "boolean" }, "schedule": { "$ref": "#/components/schemas/ScheduleBase" }, "componentId": { "type": "string", "nullable": true }, "notificationSetting": { "$ref": "#/components/schemas/NotificationSetting" }, "secretsConfiguration": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/MfeInternalSecretConfiguration" }, "nullable": true }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true } 
}, "additionalProperties": false }, "SparkJobEntry": { "type": "object", "properties": { "file": { "type": "string", "nullable": true }, "className": { "type": "string", "nullable": true } }, "additionalProperties": false }, "SparkMavenPackage": { "type": "object", "properties": { "group": { "type": "string", "nullable": true }, "artifact": { "type": "string", "nullable": true }, "version": { "type": "string", "nullable": true } }, "additionalProperties": false }, "SparkPythonTaskDto": { "type": "object", "properties": { "python_file": { "type": "string", "nullable": true }, "parameters": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "SparkResourceConfiguration": { "type": "object", "properties": { "instanceType": { "type": "string", "nullable": true }, "runtimeVersion": { "type": "string", "nullable": true } }, "additionalProperties": false }, "SparkSection": { "type": "object", "properties": { "repositories": { "type": "array", "items": { "type": "string" }, "nullable": true }, "packages": { "type": "array", "items": { "$ref": "#/components/schemas/SparkMavenPackage" }, "nullable": true }, "precachePackages": { "type": "boolean" } }, "additionalProperties": false }, "SparkSubmitTaskDto": { "type": "object", "properties": { "parameters": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "SqlDataPath": { "type": "object", "properties": { "sqlTableName": { "type": "string", "nullable": true }, "sqlQuery": { "type": "string", "nullable": true }, "sqlStoredProcedureName": { "type": "string", "nullable": true }, "sqlStoredProcedureParams": { "type": "array", "items": { "$ref": "#/components/schemas/StoredProcedureParameter" }, "nullable": true } }, "additionalProperties": false }, "StackEnsembleSettings": { "type": "object", "properties": { "stackMetaLearnerType": { "$ref": "#/components/schemas/StackMetaLearnerType" }, "stackMetaLearnerTrainPercentage": { "type": "number", "format": "double", "nullable": true }, "stackMetaLearnerKWargs": { "nullable": true } }, "additionalProperties": false }, "StackMetaLearnerType": { "enum": [ "None", "LogisticRegression", "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV", "LightGBMRegressor", "LinearRegression" ], "type": "string" }, "StandbyPoolProperties": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "count": { "type": "integer", "format": "int32" }, "vmSize": { "type": "string", "nullable": true }, "standbyAvailableInstances": { "type": "array", "items": { "$ref": "#/components/schemas/StandbyPoolResourceStatus" }, "nullable": true } }, "additionalProperties": false }, "StandbyPoolResourceStatus": { "type": "object", "properties": { "status": { "type": "string", "nullable": true }, "error": { "$ref": "#/components/schemas/CloudError" } }, "additionalProperties": false }, "StartRunResult": { "required": [ "runId" ], "type": "object", "properties": { "runId": { "minLength": 1, "type": "string" } }, "additionalProperties": false }, "StepRunProfile": { "type": "object", "properties": { "stepRunId": { "type": "string", "nullable": true }, "stepRunNumber": { "type": "integer", "format": "int32", "nullable": true }, "runUrl": { "type": "string", "nullable": true }, "computeTarget": { "type": "string", "nullable": true }, "computeTargetUrl": { "type": "string", "nullable": true }, "nodeId": { "type": "string", "nullable": true }, "nodeName": { "type": "string", "nullable": true }, 
"stepName": { "type": "string", "nullable": true }, "createTime": { "type": "integer", "format": "int64", "nullable": true }, "startTime": { "type": "integer", "format": "int64", "nullable": true }, "endTime": { "type": "integer", "format": "int64", "nullable": true }, "status": { "$ref": "#/components/schemas/RunStatus" }, "statusDetail": { "type": "string", "nullable": true }, "isReused": { "type": "boolean" }, "reusedPipelineRunId": { "type": "string", "nullable": true }, "reusedStepRunId": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "statusTimeline": { "type": "array", "items": { "$ref": "#/components/schemas/RunStatusPeriod" }, "nullable": true } }, "additionalProperties": false }, "StorageAuthType": { "enum": [ "MSI", "ConnectionString", "SAS" ], "type": "string" }, "StorageInfo": { "type": "object", "properties": { "storageAuthType": { "$ref": "#/components/schemas/StorageAuthType" }, "connectionString": { "type": "string", "nullable": true }, "sasToken": { "type": "string", "nullable": true }, "accountName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "StoredProcedureParameter": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "value": { "type": "string", "nullable": true }, "type": { "$ref": "#/components/schemas/StoredProcedureParameterType" } }, "additionalProperties": false }, "StoredProcedureParameterType": { "enum": [ "String", "Int", "Decimal", "Guid", "Boolean", "Date" ], "type": "string" }, "Stream": { "type": "object", "properties": { "canRead": { "type": "boolean", "readOnly": true }, "canWrite": { "type": "boolean", "readOnly": true }, "canSeek": { "type": "boolean", "readOnly": true }, "canTimeout": { "type": "boolean", "readOnly": true }, "length": { "type": "integer", "format": "int64", "readOnly": true }, "position": { "type": "integer", "format": "int64" }, "readTimeout": { "type": "integer", "format": "int32" }, "writeTimeout": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "StructuredInterface": { "type": "object", "properties": { "commandLinePattern": { "type": "string", "nullable": true }, "inputs": { "type": "array", "items": { "$ref": "#/components/schemas/StructuredInterfaceInput" }, "nullable": true }, "outputs": { "type": "array", "items": { "$ref": "#/components/schemas/StructuredInterfaceOutput" }, "nullable": true }, "controlOutputs": { "type": "array", "items": { "$ref": "#/components/schemas/ControlOutput" }, "nullable": true }, "parameters": { "type": "array", "items": { "$ref": "#/components/schemas/StructuredInterfaceParameter" }, "nullable": true }, "metadataParameters": { "type": "array", "items": { "$ref": "#/components/schemas/StructuredInterfaceParameter" }, "nullable": true }, "arguments": { "type": "array", "items": { "$ref": "#/components/schemas/ArgumentAssignment" }, "nullable": true } }, "additionalProperties": false }, "StructuredInterfaceInput": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "label": { "type": "string", "nullable": true }, "dataTypeIdsList": { "type": "array", "items": { "type": "string" }, "nullable": true }, "isOptional": { "type": "boolean" }, "description": { "type": "string", "nullable": true }, "skipProcessing": { "type": "boolean" }, "isResource": { "type": "boolean" }, "dataStoreMode": { "$ref": "#/components/schemas/AEVADataStoreMode" }, "pathOnCompute": { "type": "string", 
"nullable": true }, "overwrite": { "type": "boolean" }, "dataReferenceName": { "type": "string", "nullable": true }, "datasetTypes": { "uniqueItems": true, "type": "array", "items": { "$ref": "#/components/schemas/DatasetType" }, "nullable": true } }, "additionalProperties": false }, "StructuredInterfaceOutput": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "label": { "type": "string", "nullable": true }, "dataTypeId": { "type": "string", "nullable": true }, "passThroughDataTypeInputName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "skipProcessing": { "type": "boolean" }, "IsArtifact": { "type": "boolean" }, "dataStoreName": { "type": "string", "nullable": true }, "dataStoreMode": { "$ref": "#/components/schemas/AEVADataStoreMode" }, "pathOnCompute": { "type": "string", "nullable": true }, "overwrite": { "type": "boolean" }, "dataReferenceName": { "type": "string", "nullable": true }, "trainingOutput": { "$ref": "#/components/schemas/TrainingOutput" }, "datasetOutput": { "$ref": "#/components/schemas/DatasetOutput" }, "AssetOutputSettings": { "$ref": "#/components/schemas/AssetOutputSettings" }, "EarlyAvailable": { "type": "boolean" } }, "additionalProperties": false }, "StructuredInterfaceParameter": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "label": { "type": "string", "nullable": true }, "parameterType": { "$ref": "#/components/schemas/ParameterType" }, "isOptional": { "type": "boolean" }, "defaultValue": { "type": "string", "nullable": true }, "lowerBound": { "type": "string", "nullable": true }, "upperBound": { "type": "string", "nullable": true }, "enumValues": { "type": "array", "items": { "type": "string" }, "nullable": true }, "enumValuesToArgumentStrings": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "description": { "type": "string", "nullable": true }, "setEnvironmentVariable": { "type": "boolean" }, "environmentVariableOverride": { "type": "string", "nullable": true }, "enabledByParameterName": { "type": "string", "nullable": true }, "enabledByParameterValues": { "type": "array", "items": { "type": "string" }, "nullable": true }, "uiHint": { "$ref": "#/components/schemas/UIParameterHint" }, "groupNames": { "type": "array", "items": { "type": "string" }, "nullable": true }, "argumentName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "StudioMigrationInfo": { "type": "object", "properties": { "sourceWorkspaceId": { "type": "string", "nullable": true }, "sourceExperimentId": { "type": "string", "nullable": true }, "sourceExperimentLink": { "type": "string", "nullable": true }, "failedNodeIdList": { "type": "array", "items": { "type": "string" }, "nullable": true }, "errorMessage": { "type": "string", "nullable": true, "readOnly": true } }, "additionalProperties": false }, "SubGraphConcatenateAssignment": { "type": "object", "properties": { "concatenateParameter": { "type": "array", "items": { "$ref": "#/components/schemas/ParameterAssignment" }, "nullable": true }, "parameterAssignments": { "$ref": "#/components/schemas/SubPipelineParameterAssignment" } }, "additionalProperties": false }, "SubGraphConfiguration": { "type": "object", "properties": { "graphId": { "type": "string", "nullable": true }, "graphDraftId": { "type": "string", "nullable": true }, "DefaultCloudPriority": { "$ref": "#/components/schemas/CloudPrioritySetting" }, 
"IsDynamic": { "type": "boolean", "default": false, "nullable": true } }, "additionalProperties": false }, "SubGraphConnectionInfo": { "type": "object", "properties": { "nodeId": { "type": "string", "nullable": true }, "portName": { "type": "string", "nullable": true } }, "additionalProperties": false }, "SubGraphDataPathParameterAssignment": { "type": "object", "properties": { "dataSetPathParameter": { "$ref": "#/components/schemas/DataSetPathParameter" }, "dataSetPathParameterAssignments": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "SubGraphInfo": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "defaultComputeTarget": { "$ref": "#/components/schemas/ComputeSetting" }, "defaultDataStore": { "$ref": "#/components/schemas/DatastoreSetting" }, "id": { "type": "string", "nullable": true }, "parentGraphId": { "type": "string", "nullable": true }, "pipelineDefinitionId": { "type": "string", "nullable": true }, "subGraphParameterAssignment": { "type": "array", "items": { "$ref": "#/components/schemas/SubGraphParameterAssignment" }, "nullable": true }, "subGraphConcatenateAssignment": { "type": "array", "items": { "$ref": "#/components/schemas/SubGraphConcatenateAssignment" }, "nullable": true }, "subGraphDataPathParameterAssignment": { "type": "array", "items": { "$ref": "#/components/schemas/SubGraphDataPathParameterAssignment" }, "nullable": true }, "subGraphDefaultComputeTargetNodes": { "type": "array", "items": { "type": "string" }, "nullable": true }, "subGraphDefaultDataStoreNodes": { "type": "array", "items": { "type": "string" }, "nullable": true }, "inputs": { "type": "array", "items": { "$ref": "#/components/schemas/SubGraphPortInfo" }, "nullable": true }, "outputs": { "type": "array", "items": { "$ref": "#/components/schemas/SubGraphPortInfo" }, "nullable": true } }, "additionalProperties": false }, "SubGraphParameterAssignment": { "type": "object", "properties": { "parameter": { "$ref": "#/components/schemas/Parameter" }, "parameterAssignments": { "type": "array", "items": { "$ref": "#/components/schemas/SubPipelineParameterAssignment" }, "nullable": true } }, "additionalProperties": false }, "SubGraphPortInfo": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "internal": { "type": "array", "items": { "$ref": "#/components/schemas/SubGraphConnectionInfo" }, "nullable": true }, "external": { "type": "array", "items": { "$ref": "#/components/schemas/SubGraphConnectionInfo" }, "nullable": true } }, "additionalProperties": false }, "SubPipelineDefinition": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "defaultComputeTarget": { "$ref": "#/components/schemas/ComputeSetting" }, "defaultDataStore": { "$ref": "#/components/schemas/DatastoreSetting" }, "pipelineFunctionName": { "type": "string", "nullable": true }, "id": { "type": "string", "nullable": true }, "parentDefinitionId": { "type": "string", "nullable": true }, "fromModuleName": { "type": "string", "nullable": true }, "parameterList": { "type": "array", "items": { "$ref": "#/components/schemas/Kwarg" }, "nullable": true } }, "additionalProperties": false }, "SubPipelineParameterAssignment": { "type": "object", "properties": { "nodeId": { "type": "string", "nullable": true }, "parameterName": { "type": "string", "nullable": true } }, "additionalProperties": 
false }, "SubPipelinesInfo": { "type": "object", "properties": { "subGraphInfo": { "type": "array", "items": { "$ref": "#/components/schemas/SubGraphInfo" }, "nullable": true }, "nodeIdToSubGraphIdMapping": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "subPipelineDefinition": { "type": "array", "items": { "$ref": "#/components/schemas/SubPipelineDefinition" }, "nullable": true } }, "additionalProperties": false }, "SubStatusPeriod": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "subPeriods": { "type": "array", "items": { "$ref": "#/components/schemas/SubStatusPeriod" }, "nullable": true }, "start": { "type": "integer", "format": "int64", "nullable": true }, "end": { "type": "integer", "format": "int64", "nullable": true } }, "additionalProperties": false }, "SubmitBulkRunRequest": { "type": "object", "properties": { "flowDefinitionFilePath": { "type": "string", "nullable": true }, "flowDefinitionResourceId": { "type": "string", "nullable": true }, "flowDefinitionDataStoreName": { "type": "string", "nullable": true }, "flowDefinitionBlobPath": { "type": "string", "nullable": true }, "flowDefinitionDataUri": { "type": "string", "nullable": true }, "runId": { "type": "string", "nullable": true }, "runDisplayName": { "type": "string", "nullable": true }, "runExperimentName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "nodeVariant": { "type": "string", "nullable": true }, "variantRunId": { "type": "string", "nullable": true }, "baselineRunId": { "type": "string", "nullable": true }, "batchDataInput": { "$ref": "#/components/schemas/BatchDataInput" }, "inputsMapping": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "connections": { "type": "object", "additionalProperties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary" }, "description": "This is a dictionary", "nullable": true }, "environmentVariables": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "amlComputeName": { "type": "string", "nullable": true }, "runtimeName": { "type": "string", "nullable": true }, "sessionId": { "type": "string", "nullable": true }, "vmSize": { "type": "string", "nullable": true }, "maxIdleTimeSeconds": { "type": "integer", "format": "int64", "nullable": true }, "sessionSetupMode": { "$ref": "#/components/schemas/SessionSetupModeEnum" }, "outputDataStore": { "type": "string", "nullable": true }, "flowLineageId": { "type": "string", "nullable": true }, "runDisplayNameGenerationType": { "$ref": "#/components/schemas/RunDisplayNameGenerationType" } }, "additionalProperties": false }, "SubmitBulkRunResponse": { "type": "object", "properties": { "nextActionIntervalInSeconds": { "type": "integer", "format": "int32", "nullable": true }, "actionType": { "$ref": "#/components/schemas/ActionType" }, "flow_runs": { "type": "array", "items": { }, "nullable": true }, "node_runs": { "type": "array", "items": { }, "nullable": true }, "errorResponse": { "$ref": "#/components/schemas/ErrorResponse" }, 
"flowName": { "type": "string", "nullable": true }, "flowRunDisplayName": { "type": "string", "nullable": true }, "flowRunId": { "type": "string", "nullable": true }, "flowGraph": { "$ref": "#/components/schemas/FlowGraph" }, "flowGraphLayout": { "$ref": "#/components/schemas/FlowGraphLayout" }, "flowRunResourceId": { "type": "string", "nullable": true }, "bulkTestId": { "type": "string", "nullable": true }, "batchInputs": { "type": "array", "items": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary" }, "nullable": true }, "batchDataInput": { "$ref": "#/components/schemas/BatchDataInput" }, "createdBy": { "$ref": "#/components/schemas/SchemaContractsCreatedBy" }, "createdOn": { "type": "string", "format": "date-time", "nullable": true }, "flowRunType": { "$ref": "#/components/schemas/FlowRunTypeEnum" }, "flowType": { "$ref": "#/components/schemas/FlowType" }, "runtimeName": { "type": "string", "nullable": true }, "amlComputeName": { "type": "string", "nullable": true }, "flowRunLogs": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "flowTestMode": { "$ref": "#/components/schemas/FlowTestMode" }, "flowTestInfos": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowTestInfo" }, "nullable": true }, "workingDirectory": { "type": "string", "nullable": true }, "flowDagFileRelativePath": { "type": "string", "nullable": true }, "flowSnapshotId": { "type": "string", "nullable": true }, "variantRunToEvaluationRunsIdMapping": { "type": "object", "additionalProperties": { "type": "array", "items": { "type": "string" }, "nullable": true }, "nullable": true } }, "additionalProperties": false }, "SubmitFlowRequest": { "type": "object", "properties": { "flowRunId": { "type": "string", "nullable": true }, "flowRunDisplayName": { "type": "string", "nullable": true }, "flowId": { "type": "string", "nullable": true }, "flow": { "$ref": "#/components/schemas/Flow" }, "flowSubmitRunSettings": { "$ref": "#/components/schemas/FlowSubmitRunSettings" }, "asyncSubmission": { "type": "boolean" }, "useWorkspaceConnection": { "type": "boolean" }, "useFlowSnapshotToSubmit": { "type": "boolean" }, "enableBlobRunArtifacts": { "type": "boolean" }, "enableAsyncFlowTest": { "type": "boolean" }, "flowRuntimeSubmissionApiVersion": { "$ref": "#/components/schemas/FlowRuntimeSubmissionApiVersion" }, "runDisplayNameGenerationType": { "$ref": "#/components/schemas/RunDisplayNameGenerationType" } }, "additionalProperties": false }, "SubmitPipelineRunRequest": { "type": "object", "properties": { "computeTarget": { "type": "string", "nullable": true }, "flattenedSubGraphs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/PipelineSubDraft" }, "nullable": true }, "stepTags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "experimentName": { "type": "string", "nullable": true }, "pipelineParameters": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "dataPathAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/LegacyDataPath" }, "description": "This is a dictionary", "nullable": true }, "dataSetDefinitionValueAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/DataSetDefinitionValue" }, "description": "This is a dictionary", "nullable": true }, 
"assetOutputSettingsAssignments": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/AssetOutputSettings" }, "description": "This is a dictionary", "nullable": true }, "enableNotification": { "type": "boolean", "nullable": true }, "subPipelinesInfo": { "$ref": "#/components/schemas/SubPipelinesInfo" }, "displayName": { "type": "string", "nullable": true }, "runId": { "type": "string", "nullable": true }, "parentRunId": { "type": "string", "nullable": true }, "graph": { "$ref": "#/components/schemas/GraphDraftEntity" }, "pipelineRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/RunSettingParameterAssignment" }, "nullable": true }, "moduleNodeRunSettings": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNodeRunSetting" }, "nullable": true }, "moduleNodeUIInputSettings": { "type": "array", "items": { "$ref": "#/components/schemas/GraphModuleNodeUIInputSetting" }, "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "continueRunOnStepFailure": { "type": "boolean", "nullable": true }, "description": { "type": "string", "nullable": true }, "properties": { "type": "object", "additionalProperties": { "type": "string" }, "description": "This is a dictionary", "nullable": true }, "enforceRerun": { "type": "boolean", "nullable": true }, "datasetAccessModes": { "$ref": "#/components/schemas/DatasetAccessModes" } }, "additionalProperties": false }, "SuccessfulCommandReturnCode": { "enum": [ "Zero", "ZeroOrGreater" ], "type": "string" }, "SweepEarlyTerminationPolicy": { "type": "object", "properties": { "policyType": { "$ref": "#/components/schemas/EarlyTerminationPolicyType" }, "evaluationInterval": { "type": "integer", "format": "int32", "nullable": true }, "delayEvaluation": { "type": "integer", "format": "int32", "nullable": true }, "slackFactor": { "type": "number", "format": "float", "nullable": true }, "slackAmount": { "type": "number", "format": "float", "nullable": true }, "truncationPercentage": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "SweepSettings": { "type": "object", "properties": { "limits": { "$ref": "#/components/schemas/SweepSettingsLimits" }, "searchSpace": { "type": "array", "items": { "type": "object", "additionalProperties": { "type": "string" } }, "nullable": true }, "samplingAlgorithm": { "$ref": "#/components/schemas/SamplingAlgorithmType" }, "earlyTermination": { "$ref": "#/components/schemas/SweepEarlyTerminationPolicy" } }, "additionalProperties": false }, "SweepSettingsLimits": { "type": "object", "properties": { "maxTotalTrials": { "type": "integer", "format": "int32", "nullable": true }, "maxConcurrentTrials": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "SystemData": { "type": "object", "properties": { "createdAt": { "type": "string", "format": "date-time", "nullable": true }, "createdBy": { "type": "string", "nullable": true }, "createdByType": { "$ref": "#/components/schemas/UserType" }, "lastModifiedAt": { "type": "string", "format": "date-time", "nullable": true }, "lastModifiedBy": { "type": "string", "nullable": true }, "lastModifiedByType": { "$ref": "#/components/schemas/UserType" } }, "additionalProperties": false }, "SystemMeta": { "type": "object", "properties": { "identifierHash": { "type": "string", "nullable": true }, "extraHash": { "type": "string", "nullable": true }, "contentHash": { 
"type": "string", "nullable": true }, "identifierHashes": { "type": "object", "properties": { "IdentifierHash": { "type": "string" }, "IdentifierHashV2": { "type": "string" } }, "additionalProperties": false, "nullable": true }, "extraHashes": { "type": "object", "properties": { "IdentifierHash": { "type": "string" }, "IdentifierHashV2": { "type": "string" } }, "additionalProperties": false, "nullable": true } }, "additionalProperties": false }, "TabularTrainingMode": { "enum": [ "Distributed", "NonDistributed", "Auto" ], "type": "string" }, "TargetAggregationFunction": { "enum": [ "Sum", "Max", "Min", "Mean" ], "type": "string" }, "TargetLags": { "type": "object", "properties": { "mode": { "$ref": "#/components/schemas/TargetLagsMode" }, "values": { "type": "array", "items": { "type": "integer", "format": "int32" }, "nullable": true } }, "additionalProperties": false }, "TargetLagsMode": { "enum": [ "Auto", "Custom" ], "type": "string" }, "TargetRollingWindowSize": { "type": "object", "properties": { "mode": { "$ref": "#/components/schemas/TargetRollingWindowSizeMode" }, "value": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "TargetRollingWindowSizeMode": { "enum": [ "Auto", "Custom" ], "type": "string" }, "TargetSelectorConfiguration": { "type": "object", "properties": { "lowPriorityVMTolerant": { "type": "boolean" }, "clusterBlockList": { "type": "array", "items": { "type": "string" }, "nullable": true }, "computeType": { "type": "string", "nullable": true }, "instanceType": { "type": "array", "items": { "type": "string" }, "nullable": true }, "instanceTypes": { "type": "array", "items": { "type": "string" }, "nullable": true }, "myResourceOnly": { "type": "boolean" }, "planId": { "type": "string", "nullable": true }, "planRegionId": { "type": "string", "nullable": true }, "region": { "type": "array", "items": { "type": "string" }, "nullable": true }, "regions": { "type": "array", "items": { "type": "string" }, "nullable": true }, "vcBlockList": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "Task": { "type": "object", "properties": { "id": { "type": "integer", "format": "int32", "readOnly": true }, "exception": { "nullable": true, "readOnly": true }, "status": { "$ref": "#/components/schemas/TaskStatus" }, "isCanceled": { "type": "boolean", "readOnly": true }, "isCompleted": { "type": "boolean", "readOnly": true }, "isCompletedSuccessfully": { "type": "boolean", "readOnly": true }, "creationOptions": { "$ref": "#/components/schemas/TaskCreationOptions" }, "asyncState": { "nullable": true, "readOnly": true }, "isFaulted": { "type": "boolean", "readOnly": true } }, "additionalProperties": false }, "TaskControlFlowInfo": { "type": "object", "properties": { "controlFlowType": { "$ref": "#/components/schemas/ControlFlowType" }, "iterationIndex": { "type": "integer", "format": "int32" }, "itemName": { "type": "string", "nullable": true }, "parametersOverwritten": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "isReused": { "type": "boolean" } }, "additionalProperties": false }, "TaskCreationOptions": { "enum": [ "None", "PreferFairness", "LongRunning", "AttachedToParent", "DenyChildAttach", "HideScheduler", "RunContinuationsAsynchronously" ], "type": "string" }, "TaskReuseInfo": { "type": "object", "properties": { "experimentId": { "type": "string", "nullable": true }, "pipelineRunId": { "type": "string", "nullable": true }, "nodeId": { "type": 
"string", "nullable": true }, "requestId": { "type": "string", "nullable": true }, "runId": { "type": "string", "nullable": true }, "nodeStartTime": { "type": "string", "format": "date-time" }, "nodeEndTime": { "type": "string", "format": "date-time" } }, "additionalProperties": false }, "TaskStatus": { "enum": [ "Created", "WaitingForActivation", "WaitingToRun", "Running", "WaitingForChildrenToComplete", "RanToCompletion", "Canceled", "Faulted" ], "type": "string" }, "TaskStatusCode": { "enum": [ "NotStarted", "Queued", "Running", "Failed", "Finished", "Canceled", "PartiallyExecuted", "Bypassed" ], "type": "string" }, "TaskType": { "enum": [ "Classification", "Regression", "Forecasting", "ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER", "TextClassificationMultilabel" ], "type": "string" }, "TensorflowConfiguration": { "type": "object", "properties": { "workerCount": { "type": "integer", "format": "int32" }, "parameterServerCount": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "TestDataSettings": { "type": "object", "properties": { "testDataSize": { "type": "number", "format": "double", "nullable": true } }, "additionalProperties": false }, "Tool": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "type": { "$ref": "#/components/schemas/ToolType" }, "inputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/InputDefinition" }, "description": "This is a dictionary", "nullable": true }, "outputs": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/OutputDefinition" }, "description": "This is a dictionary", "nullable": true }, "description": { "type": "string", "nullable": true }, "connection_type": { "type": "array", "items": { "$ref": "#/components/schemas/ConnectionType" }, "nullable": true }, "module": { "type": "string", "nullable": true }, "class_name": { "type": "string", "nullable": true }, "source": { "type": "string", "nullable": true }, "lkgCode": { "type": "string", "nullable": true }, "code": { "type": "string", "nullable": true }, "function": { "type": "string", "nullable": true }, "action_type": { "type": "string", "nullable": true }, "provider_config": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/InputDefinition" }, "description": "This is a dictionary", "nullable": true }, "function_config": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/InputDefinition" }, "description": "This is a dictionary", "nullable": true }, "icon": { "nullable": true }, "category": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary", "nullable": true }, "is_builtin": { "type": "boolean" }, "package": { "type": "string", "nullable": true }, "package_version": { "type": "string", "nullable": true }, "default_prompt": { "type": "string", "nullable": true }, "enable_kwargs": { "type": "boolean" }, "deprecated_tools": { "type": "array", "items": { "type": "string" }, "nullable": true }, "tool_state": { "$ref": "#/components/schemas/ToolState" } }, "additionalProperties": false }, "ToolFuncCallScenario": { "enum": [ "generated_by", "reverse_generated_by", "dynamic_list" ], "type": "string" }, "ToolFuncResponse": { "type": "object", "properties": { "result": { "nullable": true }, "logs": { "type": "object", "additionalProperties": { "type": 
"string" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "ToolInputDynamicList": { "type": "object", "properties": { "func_path": { "type": "string", "nullable": true }, "func_kwargs": { "type": "array", "items": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary" }, "nullable": true } }, "additionalProperties": false }, "ToolInputGeneratedBy": { "type": "object", "properties": { "func_path": { "type": "string", "nullable": true }, "func_kwargs": { "type": "array", "items": { "type": "object", "additionalProperties": { }, "description": "This is a dictionary" }, "nullable": true }, "reverse_func_path": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ToolMetaDto": { "type": "object", "properties": { "tools": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/Tool" }, "description": "This is a dictionary", "nullable": true }, "errors": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ErrorResponse" }, "description": "This is a dictionary", "nullable": true } }, "additionalProperties": false }, "ToolSetting": { "type": "object", "properties": { "providers": { "type": "array", "items": { "$ref": "#/components/schemas/ProviderEntity" }, "nullable": true } }, "additionalProperties": false }, "ToolSourceMeta": { "type": "object", "properties": { "tool_type": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ToolState": { "enum": [ "Stable", "Preview", "Deprecated" ], "type": "string" }, "ToolType": { "enum": [ "llm", "python", "action", "prompt", "custom_llm", "csharp" ], "type": "string" }, "TorchDistributedConfiguration": { "type": "object", "properties": { "processCountPerNode": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "TrainingDiagnosticConfiguration": { "type": "object", "properties": { "jobHeartBeatTimeoutSeconds": { "type": "integer", "format": "int32", "nullable": true } }, "additionalProperties": false }, "TrainingOutput": { "type": "object", "properties": { "trainingOutputType": { "$ref": "#/components/schemas/TrainingOutputType" }, "iteration": { "type": "integer", "format": "int32", "nullable": true }, "metric": { "type": "string", "nullable": true }, "modelFile": { "type": "string", "nullable": true } }, "additionalProperties": false }, "TrainingOutputType": { "enum": [ "Metrics", "Model" ], "type": "string" }, "TrainingSettings": { "type": "object", "properties": { "blockListModels": { "type": "array", "items": { "type": "string" }, "nullable": true }, "allowListModels": { "type": "array", "items": { "type": "string" }, "nullable": true }, "enableDnnTraining": { "type": "boolean", "nullable": true }, "enableOnnxCompatibleModels": { "type": "boolean", "nullable": true }, "stackEnsembleSettings": { "$ref": "#/components/schemas/StackEnsembleSettings" }, "enableStackEnsemble": { "type": "boolean", "nullable": true }, "enableVoteEnsemble": { "type": "boolean", "nullable": true }, "ensembleModelDownloadTimeout": { "type": "string", "format": "date-span", "nullable": true }, "enableModelExplainability": { "type": "boolean", "nullable": true }, "trainingMode": { "$ref": "#/components/schemas/TabularTrainingMode" } }, "additionalProperties": false }, "TriggerAsyncOperationStatus": { "type": "object", "properties": { "id": { "type": "string", "nullable": true }, "operationType": { "$ref": "#/components/schemas/TriggerOperationType" }, 
"provisioningStatus": { "$ref": "#/components/schemas/ScheduleProvisioningStatus" }, "createdTime": { "type": "string", "format": "date-time", "nullable": true }, "endTime": { "type": "string", "format": "date-time", "nullable": true }, "error": { "$ref": "#/components/schemas/ErrorResponse" }, "statusCode": { "$ref": "#/components/schemas/HttpStatusCode" } }, "additionalProperties": false }, "TriggerOperationType": { "enum": [ "Create", "Update", "Delete", "CreateOrUpdate" ], "type": "string" }, "TriggerType": { "enum": [ "Recurrence", "Cron" ], "type": "string" }, "TuningNodeSetting": { "type": "object", "properties": { "variantIds": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "TypedAssetReference": { "type": "object", "properties": { "assetId": { "type": "string", "nullable": true }, "type": { "type": "string", "nullable": true } }, "additionalProperties": false }, "UIAzureOpenAIDeploymentNameSelector": { "type": "object", "properties": { "Capabilities": { "$ref": "#/components/schemas/UIAzureOpenAIModelCapabilities" } }, "additionalProperties": false }, "UIAzureOpenAIModelCapabilities": { "type": "object", "properties": { "Completion": { "type": "boolean", "nullable": true }, "ChatCompletion": { "type": "boolean", "nullable": true }, "Embeddings": { "type": "boolean", "nullable": true } }, "additionalProperties": false }, "UIColumnPicker": { "type": "object", "properties": { "columnPickerFor": { "type": "string", "nullable": true }, "columnSelectionCategories": { "type": "array", "items": { "type": "string" }, "nullable": true }, "singleColumnSelection": { "type": "boolean" } }, "additionalProperties": false }, "UIComputeSelection": { "type": "object", "properties": { "computeTypes": { "type": "array", "items": { "type": "string" }, "nullable": true }, "requireGpu": { "type": "boolean", "nullable": true }, "osTypes": { "type": "array", "items": { "type": "string" }, "nullable": true }, "supportServerless": { "type": "boolean" }, "computeRunSettingsMapping": { "type": "object", "additionalProperties": { "type": "array", "items": { "$ref": "#/components/schemas/RunSettingParameter" }, "nullable": true }, "nullable": true } }, "additionalProperties": false }, "UIHyperparameterConfiguration": { "type": "object", "properties": { "modelNameToHyperParameterAndDistributionMapping": { "type": "object", "additionalProperties": { "type": "object", "additionalProperties": { "type": "array", "items": { "type": "string" } }, "nullable": true }, "nullable": true }, "distributionParametersMapping": { "type": "object", "additionalProperties": { "type": "array", "items": { "$ref": "#/components/schemas/DistributionParameter" }, "nullable": true }, "nullable": true }, "jsonSchema": { "type": "string", "nullable": true } }, "additionalProperties": false }, "UIInputDataDeliveryMode": { "enum": [ "Read-only mount", "Read-write mount", "Download", "Direct", "Evaluate mount", "Evaluate download", "Hdfs" ], "type": "string" }, "UIInputSetting": { "type": "object", "properties": { "name": { "type": "string", "nullable": true }, "dataDeliveryMode": { "$ref": "#/components/schemas/UIInputDataDeliveryMode" }, "pathOnCompute": { "type": "string", "nullable": true } }, "additionalProperties": false }, "UIJsonEditor": { "type": "object", "properties": { "jsonSchema": { "type": "string", "nullable": true } }, "additionalProperties": false }, "UIParameterHint": { "type": "object", "properties": { "uiWidgetType": { "$ref": 
"#/components/schemas/UIWidgetTypeEnum" }, "columnPicker": { "$ref": "#/components/schemas/UIColumnPicker" }, "uiScriptLanguage": { "$ref": "#/components/schemas/UIScriptLanguageEnum" }, "jsonEditor": { "$ref": "#/components/schemas/UIJsonEditor" }, "PromptFlowConnectionSelector": { "$ref": "#/components/schemas/UIPromptFlowConnectionSelector" }, "AzureOpenAIDeploymentNameSelector": { "$ref": "#/components/schemas/UIAzureOpenAIDeploymentNameSelector" }, "UxIgnore": { "type": "boolean" }, "Anonymous": { "type": "boolean" } }, "additionalProperties": false }, "UIPromptFlowConnectionSelector": { "type": "object", "properties": { "PromptFlowConnectionType": { "type": "string", "nullable": true } }, "additionalProperties": false }, "UIScriptLanguageEnum": { "enum": [ "None", "Python", "R", "Json", "Sql" ], "type": "string" }, "UIWidgetMetaInfo": { "type": "object", "properties": { "moduleNodeId": { "type": "string", "nullable": true }, "metaModuleId": { "type": "string", "nullable": true }, "parameterName": { "type": "string", "nullable": true }, "uiWidgetType": { "$ref": "#/components/schemas/UIWidgetTypeEnum" } }, "additionalProperties": false }, "UIWidgetTypeEnum": { "enum": [ "Default", "Mode", "ColumnPicker", "Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection" ], "type": "string" }, "UIYamlEditor": { "type": "object", "properties": { "jsonSchema": { "type": "string", "nullable": true } }, "additionalProperties": false }, "UnversionedEntityRequestDto": { "type": "object", "properties": { "unversionedEntityIds": { "type": "array", "items": { "type": "string" }, "nullable": true } }, "additionalProperties": false }, "UnversionedEntityResponseDto": { "type": "object", "properties": { "unversionedEntities": { "type": "array", "items": { "$ref": "#/components/schemas/FlowIndexEntity" }, "nullable": true }, "unversionedEntityJsonSchema": { "nullable": true }, "normalizedRequestCharge": { "type": "number", "format": "double" }, "normalizedRequestChargePeriod": { "type": "string", "format": "date-span" } }, "additionalProperties": false }, "UnversionedRebuildIndexDto": { "type": "object", "properties": { "continuationToken": { "type": "string", "nullable": true }, "entityCount": { "type": "integer", "format": "int32", "nullable": true }, "entityContainerType": { "type": "string", "nullable": true }, "entityType": { "type": "string", "nullable": true }, "resourceId": { "type": "string", "nullable": true }, "workspaceId": { "type": "string", "nullable": true }, "immutableResourceId": { "type": "string", "format": "uuid" }, "startTime": { "type": "string", "format": "date-time", "nullable": true }, "endTime": { "type": "string", "format": "date-time", "nullable": true } }, "additionalProperties": false }, "UnversionedRebuildResponseDto": { "type": "object", "properties": { "entities": { "$ref": "#/components/schemas/SegmentedResult`1" }, "unversionedEntitySchema": { "nullable": true }, "normalizedRequestCharge": { "type": "number", "format": "double" }, "normalizedRequestChargePeriod": { "type": "string", "format": "date-span" } }, "additionalProperties": false }, "UpdateComponentRequest": { "type": "object", "properties": { "displayName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "tags": { "type": "object", 
"additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "moduleUpdateOperationType": { "$ref": "#/components/schemas/ModuleUpdateOperationType" }, "moduleVersion": { "type": "string", "nullable": true } }, "additionalProperties": false }, "UpdateFlowRequest": { "type": "object", "properties": { "flowRunResult": { "$ref": "#/components/schemas/FlowRunResult" }, "flowTestMode": { "$ref": "#/components/schemas/FlowTestMode" }, "flowTestInfos": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/FlowTestInfo" }, "nullable": true }, "flowName": { "type": "string", "nullable": true }, "description": { "type": "string", "nullable": true }, "details": { "type": "string", "nullable": true }, "tags": { "type": "object", "additionalProperties": { "type": "string", "nullable": true }, "nullable": true }, "flow": { "$ref": "#/components/schemas/Flow" }, "flowDefinitionFilePath": { "type": "string", "nullable": true }, "flowType": { "$ref": "#/components/schemas/FlowType" }, "flowRunSettings": { "$ref": "#/components/schemas/FlowRunSettings" }, "isArchived": { "type": "boolean" }, "vmSize": { "type": "string", "nullable": true }, "maxIdleTimeSeconds": { "type": "integer", "format": "int64", "nullable": true }, "identity": { "type": "string", "nullable": true } }, "additionalProperties": false }, "UpdateFlowRuntimeRequest": { "type": "object", "properties": { "runtimeDescription": { "type": "string", "nullable": true }, "environment": { "type": "string", "nullable": true }, "instanceCount": { "type": "integer", "format": "int32" } }, "additionalProperties": false }, "UpdateRegistryComponentRequest": { "type": "object", "properties": { "registryName": { "type": "string", "nullable": true }, "componentName": { "type": "string", "nullable": true }, "componentVersion": { "type": "string", "nullable": true }, "updateType": { "$ref": "#/components/schemas/UpdateType" } }, "additionalProperties": false }, "UpdateType": { "enum": [ "SetDefaultVersion" ], "type": "string" }, "UploadOptions": { "type": "object", "properties": { "overwrite": { "type": "boolean" }, "sourceGlobs": { "$ref": "#/components/schemas/ExecutionGlobsOptions" } }, "additionalProperties": false }, "UploadState": { "enum": [ "Uploading", "Completed", "Canceled", "Failed" ], "type": "string" }, "UriReference": { "type": "object", "properties": { "path": { "type": "string", "nullable": true }, "isFile": { "type": "boolean" } }, "additionalProperties": false }, "UseStl": { "enum": [ "Season", "SeasonTrend" ], "type": "string" }, "User": { "type": "object", "properties": { "userObjectId": { "type": "string", "description": "A user or service principal's object ID.\r\nThis is EUPI and may only be logged to warm path telemetry.", "nullable": true }, "userPuId": { "type": "string", "description": "A user or service principal's PuID.\r\nThis is PII and should never be logged.", "nullable": true }, "userIdp": { "type": "string", "description": "A user identity provider. Eg live.com\r\nThis is PII and should never be logged.", "nullable": true }, "userAltSecId": { "type": "string", "description": "A user alternate sec id. 
This represents the user in a different identity provider system Eg.1:live.com:puid\r\nThis is PII and should never be logged.", "nullable": true }, "userIss": { "type": "string", "description": "The issuer which issed the token for this user.\r\nThis is PII and should never be logged.", "nullable": true }, "userTenantId": { "type": "string", "description": "A user or service principal's tenant ID.", "nullable": true }, "userName": { "type": "string", "description": "A user's full name or a service principal's app ID.\r\nThis is PII and should never be logged.", "nullable": true }, "upn": { "type": "string", "description": "A user's Principal name (upn)\r\nThis is PII andshould never be logged", "nullable": true } }, "additionalProperties": false }, "UserAssignedIdentity": { "type": "object", "properties": { "principalId": { "type": "string", "format": "uuid" }, "clientId": { "type": "string", "format": "uuid" } }, "additionalProperties": false }, "UserType": { "enum": [ "User", "Application", "ManagedIdentity", "Key" ], "type": "string" }, "ValidationDataSettings": { "type": "object", "properties": { "nCrossValidations": { "$ref": "#/components/schemas/NCrossValidations" }, "validationDataSize": { "type": "number", "format": "double", "nullable": true }, "cvSplitColumnNames": { "type": "array", "items": { "type": "string" }, "nullable": true }, "validationType": { "type": "string", "nullable": true } }, "additionalProperties": false }, "ValidationStatus": { "enum": [ "Succeeded", "Failed" ], "type": "string" }, "ValueType": { "enum": [ "int", "double", "bool", "string", "secret", "prompt_template", "object", "list", "BingConnection", "OpenAIConnection", "AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection", "AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection", "SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection", "function_list", "function_str", "FormRecognizerConnection", "file_path", "image" ], "type": "string" }, "VariantNode": { "type": "object", "properties": { "node": { "$ref": "#/components/schemas/Node" }, "description": { "type": "string", "nullable": true } }, "additionalProperties": false }, "VmPriority": { "enum": [ "Dedicated", "Lowpriority" ], "type": "string" }, "WebServiceComputeMetaInfo": { "type": "object", "properties": { "nodeCount": { "type": "integer", "format": "int32" }, "isSslEnabled": { "type": "boolean" }, "aksNotFound": { "type": "boolean" }, "clusterPurpose": { "type": "string", "nullable": true }, "publicIpAddress": { "type": "string", "nullable": true }, "vmSize": { "type": "string", "nullable": true }, "location": { "type": "string", "nullable": true }, "provisioningState": { "type": "string", "nullable": true }, "state": { "type": "string", "nullable": true }, "osType": { "type": "string", "nullable": true }, "id": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true }, "createdByStudio": { "type": "boolean" }, "isGpuType": { "type": "boolean" }, "resourceId": { "type": "string", "nullable": true }, "computeType": { "type": "string", "nullable": true } }, "additionalProperties": false }, "WebServicePort": { "type": "object", "properties": { "nodeId": { "type": "string", "nullable": true }, "portName": { "type": "string", "nullable": true }, "name": { "type": "string", "nullable": true } }, "additionalProperties": false }, "WebServiceState": { "enum": [ "Transitioning", "Healthy", "Unhealthy", "Failed", "Unschedulable" ], "type": "string" }, 
"Webhook": { "type": "object", "properties": { "webhookType": { "$ref": "#/components/schemas/WebhookType" }, "eventType": { "type": "string", "nullable": true } }, "additionalProperties": false }, "WebhookType": { "enum": [ "AzureDevOps" ], "type": "string" }, "WeekDays": { "enum": [ "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday" ], "type": "string" }, "Weekday": { "enum": [ "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday" ], "type": "string" }, "WorkspaceConnectionSpec": { "type": "object", "properties": { "connectionCategory": { "$ref": "#/components/schemas/ConnectionCategory" }, "flowValueType": { "$ref": "#/components/schemas/ValueType" }, "connectionType": { "$ref": "#/components/schemas/ConnectionType" }, "connectionTypeDisplayName": { "type": "string", "nullable": true }, "configSpecs": { "type": "array", "items": { "$ref": "#/components/schemas/ConnectionConfigSpec" }, "nullable": true }, "module": { "type": "string", "nullable": true } }, "additionalProperties": false }, "YarnDeployMode": { "enum": [ "None", "Client", "Cluster" ], "type": "string" } }, "parameters": { "subscriptionIdParameter": { "name": "subscriptionId", "in": "path", "description": "The Azure Subscription ID.", "required": true, "schema": { "type": "string", "format": "uuid" }, "x-ms-parameter-location": "method" }, "resourceGroupNameParameter": { "name": "resourceGroupName", "in": "path", "description": "The Name of the resource group in which the workspace is located.", "required": true, "schema": { "type": "string" }, "x-ms-parameter-location": "method" }, "workspaceNameParameter": { "name": "workspaceName", "in": "path", "description": "The name of the workspace.", "required": true, "schema": { "type": "string" }, "x-ms-parameter-location": "method" } }, "securitySchemes": { "azure_auth": { "type": "oauth2", "flows": { "implicit": { "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", "scopes": { "user_impersonation": "impersonate your user account" } } } } } }, "security": [ { "azure_auth": [ "user_impersonation" ] } ] }
0
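The `SubmitBulkRunRequest` schema above is the payload shape used when submitting a bulk (batch) run through this service. As a rough, hypothetical sketch of what a minimal request body could look like when assembled client-side: the field names follow the schema, but every value, the data store name, the blob path, and the assumed `dataUri` shape of `BatchDataInput` (defined earlier in the spec) are illustrative placeholders, not values taken from the spec.

```python
import json

# Hypothetical SubmitBulkRunRequest payload; field names follow the schema above,
# all values are illustrative placeholders.
submit_bulk_run_body = {
    "runId": "my_run_id",
    "runDisplayName": "my run",
    "runExperimentName": "my_experiment",
    "flowDefinitionDataStoreName": "workspaceblobstore",
    "flowDefinitionBlobPath": "LocalUpload/<hash>/my_flow/flow.dag.yaml",
    # BatchDataInput is defined earlier in the spec; a data URI field is assumed here.
    "batchDataInput": {"dataUri": "azureml://datastores/workspaceblobstore/paths/data.jsonl"},
    "inputsMapping": {"question": "${data.question}"},
    # connections is a dictionary of node name -> {setting -> value}, per the schema.
    "connections": {"my_llm_node": {"connection": "azure_open_ai_connection"}},
    "environmentVariables": {"PF_LOG_LEVEL": "INFO"},
    "runtimeName": "automatic",
    "sessionId": "my_session_id",
    "vmSize": "Standard_D2",
    "maxIdleTimeSeconds": 3600,
}

print(json.dumps(submit_bulk_run_body, indent=2))
```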
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/__init__.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow_service_caller.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- """service_caller.py, module for interacting with the AzureML service.""" import json import os import sys import time import uuid from functools import wraps, cached_property import pydash from azure.core.exceptions import HttpResponseError, ResourceExistsError from azure.core.pipeline.policies import RetryPolicy from promptflow._sdk._telemetry import request_id_context from promptflow._sdk._telemetry import TelemetryMixin from promptflow._utils.logger_utils import LoggerFactory from promptflow.azure._constants._flow import AUTOMATIC_RUNTIME, SESSION_CREATION_TIMEOUT_ENV_VAR from promptflow.azure._restclient.flow import AzureMachineLearningDesignerServiceClient from promptflow.azure._utils.gerneral import get_authorization, get_arm_token, get_aml_token from promptflow.exceptions import UserErrorException, PromptflowException, SystemErrorException logger = LoggerFactory.get_logger(__name__) class FlowRequestException(SystemErrorException): """FlowRequestException.""" def __init__(self, message, **kwargs): super().__init__(message, **kwargs) class RequestTelemetryMixin(TelemetryMixin): def __init__(self): super().__init__() self._refresh_request_id_for_telemetry() self._from_cli = False def _get_telemetry_values(self, *args, **kwargs): return {"request_id": self._request_id, "from_cli": self._from_cli} def _set_from_cli_for_telemetry(self): self._from_cli = True def _refresh_request_id_for_telemetry(self): # refresh request id from current request id context self._request_id = request_id_context.get() or str(uuid.uuid4()) def _request_wrapper(): """Wrapper for request. Will refresh request id and pretty print exception.""" def exception_wrapper(func): @wraps(func) def wrapper(self, *args, **kwargs): if not isinstance(self, RequestTelemetryMixin): raise PromptflowException(f"Wrapped function is not RequestTelemetryMixin, got {type(self)}") # refresh request before each request self._refresh_request_id_for_telemetry() try: return func(self, *args, **kwargs) except HttpResponseError as e: raise FlowRequestException( f"Calling {func.__name__} failed with request id: {self._request_id} \n" f"Status code: {e.status_code} \n" f"Reason: {e.reason} \n" f"Error message: {e.message} \n" ) return wrapper return exception_wrapper class FlowServiceCaller(RequestTelemetryMixin): """FlowServiceCaller. :param workspace: workspace :type workspace: Workspace :param base_url: base url :type base_url: Service URL """ # The default namespace placeholder is used when namespace is None for get_module API. DEFAULT_COMPONENT_NAMESPACE_PLACEHOLDER = "-" DEFAULT_MODULE_WORKING_MECHANISM = "OutputToDataset" DEFAULT_DATATYPE_MECHANISM = "RegisterBuildinDataTypeOnly" FLOW_CLUSTER_ADDRESS = "FLOW_CLUSTER_ADDRESS" WORKSPACE_INDEPENDENT_ENDPOINT_ADDRESS = "WORKSPACE_INDEPENDENT_ENDPOINT_ADDRESS" DEFAULT_BASE_URL = "https://{}.api.azureml.ms" MASTER_BASE_API = "https://master.api.azureml-test.ms" DEFAULT_BASE_REGION = "westus2" AML_USE_ARM_TOKEN = "AML_USE_ARM_TOKEN" def __init__(self, workspace, credential, operation_scope, base_url=None, region=None, **kwargs): """Initializes DesignerServiceCaller.""" if "get_instance" != sys._getframe().f_back.f_code.co_name: raise UserErrorException( "Please use `_FlowServiceCallerFactory.get_instance()` to get service caller " "instead of creating a new one." 
) super().__init__() # self._service_context = workspace.service_context if base_url is None: # handle vnet scenario, it's discovery url will have workspace id after discovery base_url = workspace.discovery_url.split("discovery")[0] # for dev test, change base url with environment variable base_url = os.environ.get(self.FLOW_CLUSTER_ADDRESS, default=base_url) self._workspace = workspace self._operation_scope = operation_scope self._service_endpoint = base_url self._credential = credential retry_policy = RetryPolicy() # stop retry 500 since it will cause 409 for run creation scenario retry_policy._retry_on_status_codes.remove(500) self.caller = AzureMachineLearningDesignerServiceClient(base_url=base_url, retry_policy=retry_policy, **kwargs) def _get_headers(self): custom_header = { "Authorization": get_authorization(credential=self._credential), "x-ms-client-request-id": self._request_id, } return custom_header def _set_headers_with_user_aml_token(self, headers): aml_token = get_aml_token(credential=self._credential) headers["aml-user-token"] = aml_token def _get_user_identity_info(self): import jwt token = get_arm_token(credential=self._credential) decoded_token = jwt.decode(token, options={"verify_signature": False}) user_object_id, user_tenant_id = decoded_token["oid"], decoded_token["tid"] return user_object_id, user_tenant_id @cached_property def _common_azure_url_pattern(self): operation_scope = self._operation_scope pattern = ( f"/subscriptions/{operation_scope.subscription_id}" f"/resourceGroups/{operation_scope.resource_group_name}" f"/providers/Microsoft.MachineLearningServices" f"/workspaces/{operation_scope.workspace_name}" ) return pattern @_request_wrapper() def create_flow( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str experiment_id=None, # type: Optional[str] body=None, # type: Optional["_models.CreateFlowRequest"] **kwargs, # type: Any ): headers = self._get_headers() return self.caller.flows.create_flow( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, experiment_id=experiment_id, body=body, headers=headers, **kwargs, ) @_request_wrapper() def create_component_from_flow( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str body=None, # type: Optional["_models.LoadFlowAsComponentRequest"] **kwargs, # type: Any ): headers = self._get_headers() try: return self.caller.flows.load_as_component( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, body=body, headers=headers, **kwargs, ) except ResourceExistsError: return ( f"/subscriptions/{subscription_id}/resourceGroups/{resource_group_name}" f"/providers/Microsoft.MachineLearningServices/workspaces/{workspace_name}" f"/components/{body.component_name}/versions/{body.component_version}" ) @_request_wrapper() def list_flows( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str experiment_id=None, # type: Optional[str] owned_only=None, # type: Optional[bool] flow_type=None, # type: Optional[Union[str, "_models.FlowType"]] list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] **kwargs, # type: Any ): headers = self._get_headers() return self.caller.flows.list_flows( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, experiment_id=experiment_id, owned_only=owned_only, flow_type=flow_type, list_view_type=list_view_type, 
headers=headers, **kwargs, ) @_request_wrapper() def submit_flow( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str experiment_id, # type: str endpoint_name=None, # type: Optional[str] body=None, # type: Optional["_models.SubmitFlowRequest"] **kwargs, # type: Any ): headers = self._get_headers() return self.caller.flows.submit_flow( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, experiment_id=experiment_id, endpoint_name=endpoint_name, body=body, headers=headers, **kwargs, ) @_request_wrapper() def get_flow( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str experiment_id, # type: str **kwargs, # type: Any ): headers = self._get_headers() return self.caller.flows.get_flow( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, experiment_id=experiment_id, flow_id=flow_id, headers=headers, **kwargs, ) @_request_wrapper() def get_flow_run( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str **kwargs, # type: Any ): """Get flow run.""" headers = self._get_headers() return self.caller.bulk_runs.get_flow_run_info( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_run_id=flow_run_id, headers=headers, **kwargs, ) @_request_wrapper() def create_connection( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str body=None, # type: Optional["_models.CreateOrUpdateConnectionRequest"] **kwargs, # type: Any ): headers = self._get_headers() return self.caller.connections.create_connection( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, body=body, headers=headers, **kwargs, ) @_request_wrapper() def update_connection( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str body=None, # type: Optional["_models.CreateOrUpdateConnectionRequestDto"] **kwargs, # type: Any ): headers = self._get_headers() return self.caller.connections.update_connection( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, body=body, headers=headers, **kwargs, ) @_request_wrapper() def get_connection( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs, # type: Any ): headers = self._get_headers() return self.caller.connections.get_connection( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, headers=headers, **kwargs, ) @_request_wrapper() def delete_connection( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs, # type: Any ): headers = self._get_headers() return self.caller.connections.delete_connection( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, headers=headers, **kwargs, ) @_request_wrapper() def list_connections( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs, # type: Any 
): headers = self._get_headers() return self.caller.connections.list_connections( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, headers=headers, **kwargs, ) @_request_wrapper() def list_connection_specs( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs, # type: Any ): headers = self._get_headers() return self.caller.connections.list_connection_specs( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, headers=headers, **kwargs, ) @_request_wrapper() def submit_bulk_run( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str body=None, # type: Optional["_models.SubmitBulkRunRequest"] **kwargs, # type: Any ): """submit_bulk_run. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param body: :type body: ~flow.models.SubmitBulkRunRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ headers = self._get_headers() # pass user aml token to flow run submission self._set_headers_with_user_aml_token(headers) return self.caller.bulk_runs.submit_bulk_run( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, headers=headers, body=body, **kwargs, ) @_request_wrapper() def create_flow_session( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str session_id, # type: str body, # type: Optional["_models.CreateFlowSessionRequest"] **kwargs, # type: Any ): from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error, ) from promptflow.azure._restclient.flow.operations._flow_sessions_operations import ( build_create_flow_session_request, _convert_request, _models, ) from promptflow.azure._constants._flow import SESSION_CREATION_TIMEOUT_SECONDS from promptflow.azure._restclient.flow.models import SetupFlowSessionAction headers = self._get_headers() # pass user aml token to session create so user don't need to do authentication again in CI self._set_headers_with_user_aml_token(headers) # did not call self.caller.flow_sessions.create_flow_session because it does not support return headers cls = kwargs.pop("cls", None) # type: ClsType[Any] error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] _json = self.caller.flow_sessions._serialize.body(body, "CreateFlowSessionRequest") request = build_create_flow_session_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, session_id=session_id, content_type=content_type, json=_json, template_url=self.caller.flow_sessions.create_flow_session.metadata["url"], headers=headers, ) request = _convert_request(request) request.url = self.caller.flow_sessions._client.format_url(request.url) pipeline_response = self.caller.flow_sessions._client._pipeline.run(request, stream=False, **kwargs) response = 
pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self.caller.flow_sessions._deserialize.failsafe_deserialize( _models.ErrorResponse, pipeline_response ) raise HttpResponseError(response=response, model=error) if response.status_code == 200: return action = body.action or SetupFlowSessionAction.INSTALL.value if action == SetupFlowSessionAction.INSTALL.value: action = "creation" else: action = "reset" logger.info(f"Start polling until session {action} is completed...") # start polling status here. if "azure-asyncoperation" not in response.headers: raise FlowRequestException( "No polling url found in response headers. " f"Request id: {headers['x-ms-client-request-id']}. " f"Response headers: {response.headers}." ) polling_url = response.headers["azure-asyncoperation"] time_run = 0 sleep_period = 5 status = None timeout_seconds = SESSION_CREATION_TIMEOUT_SECONDS # polling timeout, if user set SESSION_CREATION_TIMEOUT_SECONDS in environment var, use it if os.environ.get(SESSION_CREATION_TIMEOUT_ENV_VAR): try: timeout_seconds = float(os.environ.get(SESSION_CREATION_TIMEOUT_ENV_VAR)) except ValueError: raise UserErrorException( "Environment variable {} with value {} set but failed to parse. " "Please reset the value to a number.".format( SESSION_CREATION_TIMEOUT_ENV_VAR, os.environ.get(SESSION_CREATION_TIMEOUT_ENV_VAR) ) ) # InProgress is only known non-terminal status for now. while status in [None, "InProgress"]: if time_run + sleep_period > timeout_seconds: message = ( f"Polling timeout for session {session_id} {action} " f"for {AUTOMATIC_RUNTIME} after {timeout_seconds} seconds.\n" f"To proceed the {action} for {AUTOMATIC_RUNTIME}, you can retry using the same flow, " "and we will continue polling status of previous session. \n" ) raise Exception(message) time_run += sleep_period time.sleep(sleep_period) response = self.poll_operation_status(url=polling_url, **kwargs) status = response["status"] logger.debug(f"Current polling status: {status}") if time_run % 30 == 0: # print the message every 30 seconds to avoid users feeling stuck during the operation print(f"Waiting for session {action}, current status: {status}") else: logger.debug(f"Waiting for session {action}, current status: {status}") if status == "Succeeded": error_msg = pydash.get(response, "error.message", None) if error_msg: logger.warning( f"Session {action} finished with status {status}. " f"But there are warnings when installing the packages: {error_msg}." ) else: logger.info(f"Session {action} finished with status {status}.") else: # refine response error message try: response["error"]["message"] = json.loads(response["error"]["message"]) except Exception: pass raise FlowRequestException( f"Session {action} failed for {session_id}. \n" f"Session {action} status: {status}. \n" f"Request id: {headers['x-ms-client-request-id']}. \n" f"{json.dumps(response, indent=2)}." 
) @_request_wrapper() def poll_operation_status( self, url, **kwargs # type: Any ): from azure.core.rest import HttpRequest from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error, ) from promptflow.azure._restclient.flow.operations._flow_sessions_operations import _models headers = self._get_headers() request = HttpRequest(method="GET", url=url, headers=headers, **kwargs) pipeline_response = self.caller.flow_sessions._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self.caller.flow_sessions._deserialize.failsafe_deserialize( _models.ErrorResponse, pipeline_response ) raise HttpResponseError(response=response, model=error) deserialized = self.caller.flow_sessions._deserialize("object", pipeline_response) if "status" not in deserialized: raise FlowRequestException( f"Status not found in response. Request id: {headers['x-ms-client-request-id']}. " f"Response headers: {response.headers}." ) return deserialized @_request_wrapper() def get_child_runs( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str index=None, # type: Optional[int] start_index=None, # type: Optional[int] end_index=None, # type: Optional[int] **kwargs, # type: Any ): """Get child runs of a flow run.""" headers = self._get_headers() return self.caller.bulk_runs.get_flow_child_runs( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_run_id=flow_run_id, index=index, start_index=start_index, end_index=end_index, headers=headers, **kwargs, ) @_request_wrapper() def cancel_flow_run( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str **kwargs, # type: Any ): """Cancel a flow run.""" headers = self._get_headers() return self.caller.bulk_runs.cancel_flow_run( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_run_id=flow_run_id, headers=headers, **kwargs, )
0
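The session-creation path in the service caller above polls the "azure-asyncoperation" URL until a terminal status is reached, with a configurable timeout. Below is a minimal, hedged sketch of that polling pattern in isolation; the function name, the `poll_status` callable, and the timeout value are illustrative placeholders and not part of the real caller's API.

import time

def wait_for_terminal_status(poll_status, timeout_seconds=600.0, sleep_period=5):
    """Poll `poll_status()` until it returns something other than None/"InProgress".

    `poll_status` is a placeholder callable returning the current status string;
    in the real caller this is a GET against the "azure-asyncoperation" URL.
    """
    time_run = 0
    status = None
    # "InProgress" is the only known non-terminal status in the caller above.
    while status in [None, "InProgress"]:
        if time_run + sleep_period > timeout_seconds:
            raise TimeoutError(
                f"Polling timed out after {timeout_seconds} seconds (last status: {status})."
            )
        time_run += sleep_period
        time.sleep(sleep_period)
        status = poll_status()
    return status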
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/service_caller_factory.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from threading import Lock

from promptflow.azure._restclient.flow_service_caller import FlowServiceCaller


class _FlowServiceCallerFactory:
    caller_cache_by_workspace_id = {}
    _instance_lock = Lock()

    @classmethod
    def get_instance(cls, workspace, credential, operation_scope, region=None, **kwargs) -> FlowServiceCaller:
        """Get a cached flow service caller, creating one per workspace (or region) on first use.

        :param workspace: The workspace the caller is scoped to; its id is used as the cache key.
        :param credential: The credential used to authenticate service requests.
        :param operation_scope: The operation scope passed through to the caller.
        :param region: Optional region, used as the cache key when no workspace is provided.
        :return: A shared FlowServiceCaller instance for the workspace or region.
        """
        cache_id = workspace.id if workspace else region
        cache = cls.caller_cache_by_workspace_id
        if cache_id not in cache:
            with _FlowServiceCallerFactory._instance_lock:
                if cache_id not in cache:
                    cache[cache_id] = FlowServiceCaller(
                        workspace, credential=credential, operation_scope=operation_scope, region=region, **kwargs
                    )

        return cache[cache_id]
0
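The factory above keeps one service caller per workspace id using double-checked locking: a lock-free fast path when the instance already exists, and a re-check inside the lock so concurrent callers build at most one instance. A minimal, self-contained sketch of the same pattern with a generic builder (the class and names are illustrative, not part of the SDK):

from threading import Lock

class _CachedFactory:
    _cache = {}
    _lock = Lock()

    @classmethod
    def get_instance(cls, key, build):
        # Fast path: no lock needed when the instance already exists.
        if key not in cls._cache:
            with cls._lock:
                # Re-check inside the lock so concurrent callers create only one instance.
                if key not in cls._cache:
                    cls._cache[key] = build()
        return cls._cache[key]

# Usage: _CachedFactory.get_instance("workspace-id", lambda: object())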
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/_vendor.py
# -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from azure.core.pipeline.transport import HttpRequest def _convert_request(request, files=None): data = request.content if not files else None request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) if files: request.set_formdata_body(files) return request def _format_url_section(template, **kwargs): components = template.split("/") while components: try: return template.format(**kwargs) except KeyError as key: formatted_components = template.split("/") components = [ c for c in formatted_components if "{}".format(key.args[0]) not in c ] template = "/".join(components)
0
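`_format_url_section` in the vendored helpers substitutes path parameters and, when a placeholder has no matching keyword argument, drops the URL components containing it before retrying. A small illustration of that behavior (calling the module-private helper directly, for illustration only; the values are made up):

# Assuming _format_url_section from the _vendor module above:
url = _format_url_section(
    "/flow/api/subscriptions/{subscriptionId}/workspaces/{workspaceName}",
    subscriptionId="0000-1111",
)
# "{workspaceName}" has no matching kwarg, so that component is removed:
# url == "/flow/api/subscriptions/0000-1111/workspaces"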
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/__init__.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from ._azure_machine_learning_designer_service_client import AzureMachineLearningDesignerServiceClient __all__ = ['AzureMachineLearningDesignerServiceClient'] # `._patch.py` is used for handwritten extensions to the generated code # Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md from ._patch import patch_sdk patch_sdk()
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/_azure_machine_learning_designer_service_client.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from copy import deepcopy from typing import TYPE_CHECKING from azure.core import PipelineClient from msrest import Deserializer, Serializer from . import models from ._configuration import AzureMachineLearningDesignerServiceClientConfiguration from .operations import BulkRunsOperations, ConnectionOperations, ConnectionsOperations, FlowRunsAdminOperations, FlowRuntimesOperations, FlowRuntimesWorkspaceIndependentOperations, FlowSessionsOperations, FlowsOperations, FlowsProviderOperations, ToolsOperations if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Optional from azure.core.rest import HttpRequest, HttpResponse class AzureMachineLearningDesignerServiceClient(object): """AzureMachineLearningDesignerServiceClient. :ivar bulk_runs: BulkRunsOperations operations :vartype bulk_runs: flow.operations.BulkRunsOperations :ivar connection: ConnectionOperations operations :vartype connection: flow.operations.ConnectionOperations :ivar connections: ConnectionsOperations operations :vartype connections: flow.operations.ConnectionsOperations :ivar flow_runs_admin: FlowRunsAdminOperations operations :vartype flow_runs_admin: flow.operations.FlowRunsAdminOperations :ivar flow_runtimes: FlowRuntimesOperations operations :vartype flow_runtimes: flow.operations.FlowRuntimesOperations :ivar flow_runtimes_workspace_independent: FlowRuntimesWorkspaceIndependentOperations operations :vartype flow_runtimes_workspace_independent: flow.operations.FlowRuntimesWorkspaceIndependentOperations :ivar flows: FlowsOperations operations :vartype flows: flow.operations.FlowsOperations :ivar flow_sessions: FlowSessionsOperations operations :vartype flow_sessions: flow.operations.FlowSessionsOperations :ivar flows_provider: FlowsProviderOperations operations :vartype flows_provider: flow.operations.FlowsProviderOperations :ivar tools: ToolsOperations operations :vartype tools: flow.operations.ToolsOperations :param base_url: Service URL. Default value is ''. :type base_url: str :param api_version: Api Version. The default value is "1.0.0". :type api_version: str """ def __init__( self, base_url="", # type: str api_version="1.0.0", # type: Optional[str] **kwargs # type: Any ): # type: (...) 
-> None self._config = AzureMachineLearningDesignerServiceClientConfiguration(api_version=api_version, **kwargs) self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False self.bulk_runs = BulkRunsOperations(self._client, self._config, self._serialize, self._deserialize) self.connection = ConnectionOperations(self._client, self._config, self._serialize, self._deserialize) self.connections = ConnectionsOperations(self._client, self._config, self._serialize, self._deserialize) self.flow_runs_admin = FlowRunsAdminOperations(self._client, self._config, self._serialize, self._deserialize) self.flow_runtimes = FlowRuntimesOperations(self._client, self._config, self._serialize, self._deserialize) self.flow_runtimes_workspace_independent = FlowRuntimesWorkspaceIndependentOperations(self._client, self._config, self._serialize, self._deserialize) self.flows = FlowsOperations(self._client, self._config, self._serialize, self._deserialize) self.flow_sessions = FlowSessionsOperations(self._client, self._config, self._serialize, self._deserialize) self.flows_provider = FlowsProviderOperations(self._client, self._config, self._serialize, self._deserialize) self.tools = ToolsOperations(self._client, self._config, self._serialize, self._deserialize) def _send_request( self, request, # type: HttpRequest **kwargs # type: Any ): # type: (...) -> HttpResponse """Runs the network request through the client's chained policies. >>> from azure.core.rest import HttpRequest >>> request = HttpRequest("GET", "https://www.example.org/") <HttpRequest [GET], url: 'https://www.example.org/'> >>> response = client._send_request(request) <HttpResponse: 200 OK> For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart :param request: The network request you want to make. Required. :type request: ~azure.core.rest.HttpRequest :keyword bool stream: Whether the response payload will be streamed. Defaults to False. :return: The response of your network call. Does not do error handling on your response. :rtype: ~azure.core.rest.HttpResponse """ request_copy = deepcopy(request) request_copy.url = self._client.format_url(request_copy.url) return self._client.send_request(request_copy, **kwargs) def close(self): # type: () -> None self._client.close() def __enter__(self): # type: () -> AzureMachineLearningDesignerServiceClient self._client.__enter__() return self def __exit__(self, *exc_details): # type: (Any) -> None self._client.__exit__(*exc_details)
0
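The generated client above wires each operation group (bulk_runs, flow_sessions, and so on) onto a shared azure.core PipelineClient. A minimal sketch of constructing it; the base URL is a placeholder, and real calls additionally need an authentication_policy kwarg plus the usual subscription/resource group/workspace route parameters:

from promptflow.azure._restclient.flow import AzureMachineLearningDesignerServiceClient

# Placeholder endpoint; no network call is made by construction alone.
with AzureMachineLearningDesignerServiceClient(base_url="https://example.region.api.azureml.ms") as client:
    bulk_runs = client.bulk_runs        # BulkRunsOperations
    sessions = client.flow_sessions     # FlowSessionsOperations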
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/py.typed
# Marker file for PEP 561.
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/_patch.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- # coding=utf-8 # -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- # This file is used for handwritten extensions to the generated code. Example: # https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md def patch_sdk(): pass
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/_configuration.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import TYPE_CHECKING from azure.core.configuration import Configuration from azure.core.pipeline import policies if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Optional VERSION = "unknown" class AzureMachineLearningDesignerServiceClientConfiguration(Configuration): """Configuration for AzureMachineLearningDesignerServiceClient. Note that all parameters used to create this instance are saved as instance attributes. :param api_version: Api Version. The default value is "1.0.0". :type api_version: str """ def __init__( self, api_version="1.0.0", # type: Optional[str] **kwargs # type: Any ): # type: (...) -> None super(AzureMachineLearningDesignerServiceClientConfiguration, self).__init__(**kwargs) self.api_version = api_version kwargs.setdefault('sdk_moniker', 'azuremachinelearningdesignerserviceclient/{}'.format(VERSION)) self._configure(**kwargs) def _configure( self, **kwargs # type: Any ): # type: (...) -> None self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) self.authentication_policy = kwargs.get('authentication_policy')
0
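Because `_configure` only builds a default policy when the corresponding keyword argument is absent, any pipeline policy can be overridden through the client constructor. A hedged example passing a custom retry policy (the endpoint is a placeholder):

from azure.core.pipeline import policies
from promptflow.azure._restclient.flow import AzureMachineLearningDesignerServiceClient

retry = policies.RetryPolicy(retry_total=3, retry_backoff_factor=1.0)
client = AzureMachineLearningDesignerServiceClient(
    base_url="https://example.region.api.azureml.ms",
    retry_policy=retry,  # picked up by kwargs.get('retry_policy') in _configure
)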
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/_flow_runs_admin_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from msrest import Serializer from .. import models as _models from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False # fmt: off def build_submit_bulk_run_async_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str bulk_run_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest error_handling_mode = kwargs.pop('error_handling_mode', None) # type: Optional[Union[str, "_models.ErrorHandlingMode"]] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/submit') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "bulkRunId": _SERIALIZER.url("bulk_run_id", bulk_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if error_handling_mode is not None: query_parameters['errorHandlingMode'] = _SERIALIZER.query("error_handling_mode", error_handling_mode, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_send_policy_validation_async_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str bulk_run_id, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/policy') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "bulkRunId": _SERIALIZER.url("bulk_run_id", bulk_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_check_policy_validation_async_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str bulk_run_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/policy') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "bulkRunId": _SERIALIZER.url("bulk_run_id", bulk_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_log_result_for_bulk_run_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str bulk_run_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/LogResult') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "bulkRunId": _SERIALIZER.url("bulk_run_id", bulk_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_get_storage_info_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/storageInfo') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_log_flow_run_event_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str runtime_version, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/flowRuns/{flowRunId}/runtime/{runtimeVersion}/logEvent') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), "runtimeVersion": _SERIALIZER.url("runtime_version", runtime_version, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_log_flow_run_event_v2_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest runtime_version = kwargs.pop('runtime_version', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/flowRuns/{flowRunId}/logEvent') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if runtime_version is not None: query_parameters['runtimeVersion'] = _SERIALIZER.query("runtime_version", runtime_version, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_log_flow_run_terminated_event_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest last_checked_time = kwargs.pop('last_checked_time', None) # type: Optional[datetime.datetime] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/flowRuns/{flowRunId}/logTerminatedEvent') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if last_checked_time is not None: query_parameters['lastCheckedTime'] = _SERIALIZER.query("last_checked_time", last_checked_time, 'iso-8601') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_update_service_logs_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str bulk_run_id, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/serviceLogs') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "bulkRunId": _SERIALIZER.url("bulk_run_id", bulk_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_batch_update_service_logs_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str bulk_run_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/serviceLogs/batch') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "bulkRunId": _SERIALIZER.url("bulk_run_id", bulk_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) # fmt: on class FlowRunsAdminOperations(object): """FlowRunsAdminOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. 
""" models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace def submit_bulk_run_async( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str bulk_run_id, # type: str error_handling_mode=None, # type: Optional[Union[str, "_models.ErrorHandlingMode"]] **kwargs # type: Any ): # type: (...) -> "_models.SubmitBulkRunResponse" """submit_bulk_run_async. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param flow_id: :type flow_id: str :param bulk_run_id: :type bulk_run_id: str :param error_handling_mode: :type error_handling_mode: str or ~flow.models.ErrorHandlingMode :keyword callable cls: A custom type or function that will be passed the direct response :return: SubmitBulkRunResponse, or the result of cls(response) :rtype: ~flow.models.SubmitBulkRunResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.SubmitBulkRunResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_submit_bulk_run_async_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, bulk_run_id=bulk_run_id, error_handling_mode=error_handling_mode, template_url=self.submit_bulk_run_async.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('SubmitBulkRunResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized submit_bulk_run_async.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/submit'} # type: ignore @distributed_trace def send_policy_validation_async( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str bulk_run_id, # type: str **kwargs # type: Any ): # type: (...) -> "_models.PolicyValidationResponse" """send_policy_validation_async. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param bulk_run_id: :type bulk_run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PolicyValidationResponse, or the result of cls(response) :rtype: ~flow.models.PolicyValidationResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyValidationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_send_policy_validation_async_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, bulk_run_id=bulk_run_id, template_url=self.send_policy_validation_async.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('PolicyValidationResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized send_policy_validation_async.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/policy'} # type: ignore @distributed_trace def check_policy_validation_async( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str bulk_run_id, # type: str **kwargs # type: Any ): # type: (...) -> "_models.PolicyValidationResponse" """check_policy_validation_async. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param bulk_run_id: :type bulk_run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PolicyValidationResponse, or the result of cls(response) :rtype: ~flow.models.PolicyValidationResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyValidationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_check_policy_validation_async_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, bulk_run_id=bulk_run_id, template_url=self.check_policy_validation_async.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('PolicyValidationResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized check_policy_validation_async.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/policy'} # type: ignore @distributed_trace def log_result_for_bulk_run( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str bulk_run_id, # type: str **kwargs # type: Any ): # type: (...) -> List["_models.KeyValuePairStringObject"] """log_result_for_bulk_run. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param bulk_run_id: :type bulk_run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of KeyValuePairStringObject, or the result of cls(response) :rtype: list[~flow.models.KeyValuePairStringObject] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.KeyValuePairStringObject"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_log_result_for_bulk_run_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, bulk_run_id=bulk_run_id, template_url=self.log_result_for_bulk_run.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[KeyValuePairStringObject]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized log_result_for_bulk_run.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/LogResult'} # type: ignore @distributed_trace def get_storage_info( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.StorageInfo" """get_storage_info. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: StorageInfo, or the result of cls(response) :rtype: ~flow.models.StorageInfo :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageInfo"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_storage_info_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.get_storage_info.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('StorageInfo', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_storage_info.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/storageInfo'} # type: ignore @distributed_trace def log_flow_run_event( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str runtime_version, # type: str **kwargs # type: Any ): # type: (...) -> str """log_flow_run_event. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param runtime_version: :type runtime_version: str :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_log_flow_run_event_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, runtime_version=runtime_version, template_url=self.log_flow_run_event.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized log_flow_run_event.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/flowRuns/{flowRunId}/runtime/{runtimeVersion}/logEvent'} # type: ignore @distributed_trace def log_flow_run_event_v2( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str runtime_version=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> str """log_flow_run_event_v2. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param runtime_version: :type runtime_version: str :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_log_flow_run_event_v2_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, runtime_version=runtime_version, template_url=self.log_flow_run_event_v2.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized log_flow_run_event_v2.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/flowRuns/{flowRunId}/logEvent'} # type: ignore @distributed_trace def log_flow_run_terminated_event( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str last_checked_time=None, # type: Optional[datetime.datetime] **kwargs # type: Any ): # type: (...) -> "_models.LogRunTerminatedEventDto" """log_flow_run_terminated_event. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param last_checked_time: :type last_checked_time: ~datetime.datetime :keyword callable cls: A custom type or function that will be passed the direct response :return: LogRunTerminatedEventDto, or the result of cls(response) :rtype: ~flow.models.LogRunTerminatedEventDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.LogRunTerminatedEventDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_log_flow_run_terminated_event_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, last_checked_time=last_checked_time, template_url=self.log_flow_run_terminated_event.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('LogRunTerminatedEventDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized log_flow_run_terminated_event.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/flowRuns/{flowRunId}/logTerminatedEvent'} # type: ignore @distributed_trace def update_service_logs( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str bulk_run_id, # type: str body=None, # type: Optional["_models.ServiceLogRequest"] **kwargs # type: Any ): # type: (...) -> "_models.Task" """update_service_logs. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param bulk_run_id: :type bulk_run_id: str :param body: :type body: ~flow.models.ServiceLogRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: Task, or the result of cls(response) :rtype: ~flow.models.Task :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.Task"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'ServiceLogRequest') else: _json = None request = build_update_service_logs_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, bulk_run_id=bulk_run_id, content_type=content_type, json=_json, template_url=self.update_service_logs.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('Task', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_service_logs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/serviceLogs'} # type: ignore @distributed_trace def batch_update_service_logs( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str bulk_run_id, # type: str body=None, # type: Optional[List["_models.ServiceLogRequest"]] **kwargs # type: Any ): # type: (...) -> "_models.Task" """batch_update_service_logs. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param bulk_run_id: :type bulk_run_id: str :param body: :type body: list[~flow.models.ServiceLogRequest] :keyword callable cls: A custom type or function that will be passed the direct response :return: Task, or the result of cls(response) :rtype: ~flow.models.Task :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.Task"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, '[ServiceLogRequest]') else: _json = None request = build_batch_update_service_logs_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, bulk_run_id=bulk_run_id, content_type=content_type, json=_json, template_url=self.batch_update_service_logs.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('Task', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized batch_update_service_logs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/serviceLogs/batch'} # type: ignore
0
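Each operation in the admin operation group builds an HttpRequest with the request builders above, runs it through the client pipeline, and deserializes the body, raising HttpResponseError on anything other than HTTP 200. A hedged call sketch against `get_storage_info`, assuming an already-authenticated client (see the earlier sketch) and placeholder ARM identifiers:

# `client` is an authenticated AzureMachineLearningDesignerServiceClient.
info = client.flow_runs_admin.get_storage_info(
    subscription_id="00000000-0000-0000-0000-000000000000",
    resource_group_name="my-rg",
    workspace_name="my-workspace",
)
# `info` is a flow.models.StorageInfo on success; failures raise azure.core.exceptions.HttpResponseError.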
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/_flow_runtimes_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from msrest import Serializer from .. import models as _models from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False # fmt: off def build_create_runtime_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str runtime_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] async_call = kwargs.pop('async_call', False) # type: Optional[bool] msi_token = kwargs.pop('msi_token', False) # type: Optional[bool] skip_port_check = kwargs.pop('skip_port_check', False) # type: Optional[bool] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "runtimeName": _SERIALIZER.url("runtime_name", runtime_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if async_call is not None: query_parameters['asyncCall'] = _SERIALIZER.query("async_call", async_call, 'bool') if msi_token is not None: query_parameters['msiToken'] = _SERIALIZER.query("msi_token", msi_token, 'bool') if skip_port_check is not None: query_parameters['skipPortCheck'] = _SERIALIZER.query("skip_port_check", skip_port_check, 'bool') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_update_runtime_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str runtime_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] async_call = kwargs.pop('async_call', False) # type: Optional[bool] msi_token = kwargs.pop('msi_token', False) # type: Optional[bool] skip_port_check = kwargs.pop('skip_port_check', False) # type: Optional[bool] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "runtimeName": _SERIALIZER.url("runtime_name", runtime_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if async_call is not None: query_parameters['asyncCall'] = _SERIALIZER.query("async_call", async_call, 'bool') if msi_token is not None: query_parameters['msiToken'] = _SERIALIZER.query("msi_token", msi_token, 'bool') if skip_port_check is not None: query_parameters['skipPortCheck'] = _SERIALIZER.query("skip_port_check", skip_port_check, 'bool') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_runtime_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str runtime_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "runtimeName": _SERIALIZER.url("runtime_name", runtime_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_delete_runtime_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str runtime_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest async_call = kwargs.pop('async_call', False) # type: Optional[bool] msi_token = kwargs.pop('msi_token', False) # type: Optional[bool] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "runtimeName": _SERIALIZER.url("runtime_name", runtime_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if async_call is not None: query_parameters['asyncCall'] = _SERIALIZER.query("async_call", async_call, 'bool') if msi_token is not None: query_parameters['msiToken'] = _SERIALIZER.query("msi_token", msi_token, 'bool') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="DELETE", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_check_ci_availability_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest compute_instance_name = kwargs.pop('compute_instance_name') # type: str custom_app_name = kwargs.pop('custom_app_name') # type: str accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/checkCiAvailability') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] query_parameters['computeInstanceName'] = _SERIALIZER.query("compute_instance_name", compute_instance_name, 'str') query_parameters['customAppName'] = _SERIALIZER.query("custom_app_name", custom_app_name, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_check_mir_availability_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest endpoint_name = kwargs.pop('endpoint_name') # type: str deployment_name = kwargs.pop('deployment_name') # type: str accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/checkMirAvailability') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] query_parameters['endpointName'] = _SERIALIZER.query("endpoint_name", endpoint_name, 'str') query_parameters['deploymentName'] = _SERIALIZER.query("deployment_name", deployment_name, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_check_runtime_upgrade_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str runtime_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}/needUpgrade') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "runtimeName": _SERIALIZER.url("runtime_name", runtime_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_get_runtime_capability_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str runtime_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}/capability') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "runtimeName": _SERIALIZER.url("runtime_name", runtime_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_get_runtime_latest_config_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/latestConfig') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_list_runtimes_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) # fmt: on class FlowRuntimesOperations(object): """FlowRuntimesOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. 
""" models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace def create_runtime( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str runtime_name, # type: str async_call=False, # type: Optional[bool] msi_token=False, # type: Optional[bool] skip_port_check=False, # type: Optional[bool] body=None, # type: Optional["_models.CreateFlowRuntimeRequest"] **kwargs # type: Any ): # type: (...) -> "_models.FlowRuntimeDto" """create_runtime. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param runtime_name: :type runtime_name: str :param async_call: :type async_call: bool :param msi_token: :type msi_token: bool :param skip_port_check: :type skip_port_check: bool :param body: :type body: ~flow.models.CreateFlowRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRuntimeDto, or the result of cls(response) :rtype: ~flow.models.FlowRuntimeDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRuntimeDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateFlowRuntimeRequest') else: _json = None request = build_create_runtime_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, content_type=content_type, json=_json, async_call=async_call, msi_token=msi_token, skip_port_check=skip_port_check, template_url=self.create_runtime.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRuntimeDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_runtime.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}'} # type: ignore @distributed_trace def update_runtime( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str runtime_name, # type: str async_call=False, # type: Optional[bool] msi_token=False, # type: Optional[bool] skip_port_check=False, # type: Optional[bool] body=None, # type: Optional["_models.UpdateFlowRuntimeRequest"] **kwargs # type: Any ): # type: (...) -> "_models.FlowRuntimeDto" """update_runtime. :param subscription_id: The Azure Subscription ID. 
:type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param runtime_name: :type runtime_name: str :param async_call: :type async_call: bool :param msi_token: :type msi_token: bool :param skip_port_check: :type skip_port_check: bool :param body: :type body: ~flow.models.UpdateFlowRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRuntimeDto, or the result of cls(response) :rtype: ~flow.models.FlowRuntimeDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRuntimeDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'UpdateFlowRuntimeRequest') else: _json = None request = build_update_runtime_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, content_type=content_type, json=_json, async_call=async_call, msi_token=msi_token, skip_port_check=skip_port_check, template_url=self.update_runtime.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRuntimeDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_runtime.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}'} # type: ignore @distributed_trace def get_runtime( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str runtime_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.FlowRuntimeDto" """get_runtime. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param runtime_name: :type runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRuntimeDto, or the result of cls(response) :rtype: ~flow.models.FlowRuntimeDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRuntimeDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_runtime_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, template_url=self.get_runtime.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRuntimeDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_runtime.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}'} # type: ignore @distributed_trace def delete_runtime( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str runtime_name, # type: str async_call=False, # type: Optional[bool] msi_token=False, # type: Optional[bool] **kwargs # type: Any ): # type: (...) -> "_models.FlowRuntimeDto" """delete_runtime. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param runtime_name: :type runtime_name: str :param async_call: :type async_call: bool :param msi_token: :type msi_token: bool :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRuntimeDto, or the result of cls(response) :rtype: ~flow.models.FlowRuntimeDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRuntimeDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_delete_runtime_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, async_call=async_call, msi_token=msi_token, template_url=self.delete_runtime.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRuntimeDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_runtime.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}'} # type: ignore @distributed_trace def check_ci_availability( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str compute_instance_name, # type: str custom_app_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.AvailabilityResponse" """check_ci_availability. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param compute_instance_name: :type compute_instance_name: str :param custom_app_name: :type custom_app_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: AvailabilityResponse, or the result of cls(response) :rtype: ~flow.models.AvailabilityResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailabilityResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_check_ci_availability_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, compute_instance_name=compute_instance_name, custom_app_name=custom_app_name, template_url=self.check_ci_availability.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('AvailabilityResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized check_ci_availability.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/checkCiAvailability'} # type: ignore @distributed_trace def check_mir_availability( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str endpoint_name, # type: str deployment_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.AvailabilityResponse" """check_mir_availability. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param endpoint_name: :type endpoint_name: str :param deployment_name: :type deployment_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: AvailabilityResponse, or the result of cls(response) :rtype: ~flow.models.AvailabilityResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailabilityResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_check_mir_availability_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, template_url=self.check_mir_availability.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('AvailabilityResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized check_mir_availability.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/checkMirAvailability'} # type: ignore @distributed_trace def check_runtime_upgrade( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str runtime_name, # type: str **kwargs # type: Any ): # type: (...) -> bool """check_runtime_upgrade. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param runtime_name: :type runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: bool, or the result of cls(response) :rtype: bool :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[bool] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_check_runtime_upgrade_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, template_url=self.check_runtime_upgrade.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('bool', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized check_runtime_upgrade.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}/needUpgrade'} # type: ignore @distributed_trace def get_runtime_capability( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str runtime_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.FlowRuntimeCapability" """get_runtime_capability. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param runtime_name: :type runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRuntimeCapability, or the result of cls(response) :rtype: ~flow.models.FlowRuntimeCapability :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRuntimeCapability"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_runtime_capability_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, template_url=self.get_runtime_capability.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRuntimeCapability', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_runtime_capability.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}/capability'} # type: ignore @distributed_trace def get_runtime_latest_config( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.RuntimeConfiguration" """get_runtime_latest_config. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: RuntimeConfiguration, or the result of cls(response) :rtype: ~flow.models.RuntimeConfiguration :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.RuntimeConfiguration"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_runtime_latest_config_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.get_runtime_latest_config.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('RuntimeConfiguration', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_runtime_latest_config.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/latestConfig'} # type: ignore @distributed_trace def list_runtimes( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> List["_models.FlowRuntimeDto"] """list_runtimes. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of FlowRuntimeDto, or the result of cls(response) :rtype: list[~flow.models.FlowRuntimeDto] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.FlowRuntimeDto"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_runtimes_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.list_runtimes.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[FlowRuntimeDto]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list_runtimes.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes'} # type: ignore
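# --------------------------------------------------------------------------
# Editor's note: minimal usage sketch, not part of the AutoRest-generated
# code above. It only calls the request-builder helpers defined in this
# module, which construct azure.core.rest.HttpRequest objects without sending
# anything. The subscription, resource group, workspace, and runtime names
# below are placeholders, and the helper function name is hypothetical;
# actually sending the requests still requires the generated service client
# and its pipeline (see FlowRuntimesOperations above), which is not shown.
# --------------------------------------------------------------------------
def _example_build_flow_runtime_requests():
    # Build the GET request that would list all flow runtimes in a workspace.
    list_request = build_list_runtimes_request(
        subscription_id="00000000-0000-0000-0000-000000000000",
        resource_group_name="<resource-group-name>",
        workspace_name="<workspace-name>",
    )

    # Build the POST request that would create a runtime. The optional query
    # flags mirror the keyword arguments accepted by
    # FlowRuntimesOperations.create_runtime.
    create_request = build_create_runtime_request(
        subscription_id="00000000-0000-0000-0000-000000000000",
        resource_group_name="<resource-group-name>",
        workspace_name="<workspace-name>",
        runtime_name="my-runtime",
        content_type="application/json",
        async_call=True,
    )

    # Both objects expose the resolved method and URL, e.g. "GET" and the
    # formatted /flow/api/subscriptions/.../FlowRuntimes path.
    return list_request, create_request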
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/_tools_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from msrest import Serializer from .. import models as _models from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False # fmt: off def build_get_tool_setting_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/setting') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_get_samples_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/samples') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_get_tool_meta_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] tool_name = kwargs.pop('tool_name') # type: str tool_type = kwargs.pop('tool_type') # type: str endpoint_name = kwargs.pop('endpoint_name', None) # type: Optional[str] flow_runtime_name = kwargs.pop('flow_runtime_name', None) # type: Optional[str] flow_id = kwargs.pop('flow_id', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/meta') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] query_parameters['toolName'] = _SERIALIZER.query("tool_name", tool_name, 'str') query_parameters['toolType'] = _SERIALIZER.query("tool_type", tool_type, 'str') if endpoint_name is not None: query_parameters['endpointName'] = _SERIALIZER.query("endpoint_name", endpoint_name, 'str') if flow_runtime_name is not None: query_parameters['flowRuntimeName'] = _SERIALIZER.query("flow_runtime_name", flow_runtime_name, 'str') if flow_id is not None: query_parameters['flowId'] = _SERIALIZER.query("flow_id", flow_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_tool_meta_v2_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] flow_runtime_name = kwargs.pop('flow_runtime_name', None) # type: Optional[str] flow_id = kwargs.pop('flow_id', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/meta-v2') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if flow_runtime_name is not None: query_parameters['flowRuntimeName'] = _SERIALIZER.query("flow_runtime_name", flow_runtime_name, 'str') if flow_id is not None: query_parameters['flowId'] = _SERIALIZER.query("flow_id", flow_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_package_tools_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest flow_runtime_name = kwargs.pop('flow_runtime_name', None) # type: Optional[str] flow_id = kwargs.pop('flow_id', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/packageTools') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if flow_runtime_name is not None: query_parameters['flowRuntimeName'] = _SERIALIZER.query("flow_runtime_name", flow_runtime_name, 'str') if flow_id is not None: query_parameters['flowId'] = _SERIALIZER.query("flow_id", flow_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_dynamic_list_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] flow_runtime_name = kwargs.pop('flow_runtime_name', None) # type: Optional[str] flow_id = kwargs.pop('flow_id', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/dynamicList') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if flow_runtime_name is not None: query_parameters['flowRuntimeName'] = _SERIALIZER.query("flow_runtime_name", flow_runtime_name, 'str') if flow_id is not None: query_parameters['flowId'] = _SERIALIZER.query("flow_id", flow_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_retrieve_tool_func_result_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] flow_runtime_name = kwargs.pop('flow_runtime_name', None) # type: Optional[str] flow_id = kwargs.pop('flow_id', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/RetrieveToolFuncResult') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if flow_runtime_name is not None: query_parameters['flowRuntimeName'] = _SERIALIZER.query("flow_runtime_name", flow_runtime_name, 'str') if flow_id is not None: query_parameters['flowId'] = _SERIALIZER.query("flow_id", flow_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) # fmt: on class ToolsOperations(object): """ToolsOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. 
:type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace def get_tool_setting( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.ToolSetting" """get_tool_setting. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ToolSetting, or the result of cls(response) :rtype: ~flow.models.ToolSetting :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ToolSetting"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_tool_setting_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.get_tool_setting.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ToolSetting', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_tool_setting.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/setting'} # type: ignore @distributed_trace def get_samples( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> Dict[str, "_models.Tool"] """get_samples. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: dict mapping str to Tool, or the result of cls(response) :rtype: dict[str, ~flow.models.Tool] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, "_models.Tool"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_samples_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.get_samples.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('{Tool}', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_samples.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/samples'} # type: ignore @distributed_trace def get_tool_meta( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str tool_name, # type: str tool_type, # type: str endpoint_name=None, # type: Optional[str] flow_runtime_name=None, # type: Optional[str] flow_id=None, # type: Optional[str] data=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> str """get_tool_meta. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param tool_name: :type tool_name: str :param tool_type: :type tool_type: str :param endpoint_name: :type endpoint_name: str :param flow_runtime_name: :type flow_runtime_name: str :param flow_id: :type flow_id: str :param data: :type data: str :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "text/plain") # type: Optional[str] _content = data request = build_get_tool_meta_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, tool_name=tool_name, tool_type=tool_type, content=_content, endpoint_name=endpoint_name, flow_runtime_name=flow_runtime_name, flow_id=flow_id, template_url=self.get_tool_meta.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_tool_meta.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/meta'} # type: ignore @distributed_trace def get_tool_meta_v2( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_runtime_name=None, # type: Optional[str] flow_id=None, # type: Optional[str] body=None, # type: Optional["_models.GenerateToolMetaRequest"] **kwargs # type: Any ): # type: (...) -> "_models.ToolMetaDto" """get_tool_meta_v2. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_runtime_name: :type flow_runtime_name: str :param flow_id: :type flow_id: str :param body: :type body: ~flow.models.GenerateToolMetaRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ToolMetaDto, or the result of cls(response) :rtype: ~flow.models.ToolMetaDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ToolMetaDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'GenerateToolMetaRequest') else: _json = None request = build_get_tool_meta_v2_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, flow_runtime_name=flow_runtime_name, flow_id=flow_id, template_url=self.get_tool_meta_v2.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ToolMetaDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_tool_meta_v2.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/meta-v2'} # type: ignore @distributed_trace def get_package_tools( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_runtime_name=None, # type: Optional[str] flow_id=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> Dict[str, "_models.Tool"] """get_package_tools. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_runtime_name: :type flow_runtime_name: str :param flow_id: :type flow_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: dict mapping str to Tool, or the result of cls(response) :rtype: dict[str, ~flow.models.Tool] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, "_models.Tool"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_package_tools_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_runtime_name=flow_runtime_name, flow_id=flow_id, template_url=self.get_package_tools.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('{Tool}', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_package_tools.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/packageTools'} # type: ignore @distributed_trace def get_dynamic_list( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_runtime_name=None, # type: Optional[str] flow_id=None, # type: Optional[str] body=None, # type: Optional["_models.GetDynamicListRequest"] **kwargs # type: Any ): # type: (...) -> List[Any] """get_dynamic_list. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_runtime_name: :type flow_runtime_name: str :param flow_id: :type flow_id: str :param body: :type body: ~flow.models.GetDynamicListRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: list of any, or the result of cls(response) :rtype: list[any] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List[Any]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'GetDynamicListRequest') else: _json = None request = build_get_dynamic_list_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, flow_runtime_name=flow_runtime_name, flow_id=flow_id, template_url=self.get_dynamic_list.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[object]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_dynamic_list.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/dynamicList'} # type: ignore @distributed_trace def retrieve_tool_func_result( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_runtime_name=None, # type: Optional[str] flow_id=None, # type: Optional[str] body=None, # type: Optional["_models.RetrieveToolFuncResultRequest"] **kwargs # type: Any ): # type: (...) -> "_models.ToolFuncResponse" """retrieve_tool_func_result. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_runtime_name: :type flow_runtime_name: str :param flow_id: :type flow_id: str :param body: :type body: ~flow.models.RetrieveToolFuncResultRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ToolFuncResponse, or the result of cls(response) :rtype: ~flow.models.ToolFuncResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ToolFuncResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'RetrieveToolFuncResultRequest') else: _json = None request = build_retrieve_tool_func_result_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, flow_runtime_name=flow_runtime_name, flow_id=flow_id, template_url=self.retrieve_tool_func_result.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ToolFuncResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized retrieve_tool_func_result.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/RetrieveToolFuncResult'} # type: ignore
0
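The ToolsOperations methods above all follow the same calling pattern: workspace-scoping arguments plus optional flow context, with non-2xx responses surfaced as HttpResponseError carrying the deserialized ErrorResponse. The following is a minimal usage sketch only; it assumes a ToolsOperations instance (`tools_ops`) has already been obtained from the generated service client, which is defined outside this file, and the helper name `list_package_tool_names` is hypothetical.

# Minimal usage sketch (assumption: `tools_ops` is a ToolsOperations instance
# attached to an authenticated service client constructed elsewhere).
from azure.core.exceptions import HttpResponseError


def list_package_tool_names(tools_ops, subscription_id, resource_group_name, workspace_name):
    """Hypothetical helper: return the names of all package tools in a workspace."""
    try:
        # get_package_tools returns a dict mapping tool name -> Tool model.
        tools = tools_ops.get_package_tools(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
        )
    except HttpResponseError as exc:
        # 401/404/409 are mapped to specific azure-core exceptions; any other
        # non-200 status raises HttpResponseError with the ErrorResponse model attached.
        raise RuntimeError(f"Failed to list package tools: {exc.message}") from exc
    return sorted(tools.keys())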
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/_flows_provider_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from msrest import Serializer from .. import models as _models from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Optional, TypeVar T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False # fmt: off def build_get_index_entity_by_id_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/v1.0/flows/getIndexEntities') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_get_updated_entity_ids_for_workspace_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/v1.0/flows/rebuildIndex') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) # fmt: on class FlowsProviderOperations(object): """FlowsProviderOperations operations. You should not instantiate this class directly. 
Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace def get_index_entity_by_id( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str body=None, # type: Optional["_models.UnversionedEntityRequestDto"] **kwargs # type: Any ): # type: (...) -> "_models.UnversionedEntityResponseDto" """get_index_entity_by_id. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param body: :type body: ~flow.models.UnversionedEntityRequestDto :keyword callable cls: A custom type or function that will be passed the direct response :return: UnversionedEntityResponseDto, or the result of cls(response) :rtype: ~flow.models.UnversionedEntityResponseDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.UnversionedEntityResponseDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'UnversionedEntityRequestDto') else: _json = None request = build_get_index_entity_by_id_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, template_url=self.get_index_entity_by_id.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('UnversionedEntityResponseDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_index_entity_by_id.metadata = {'url': '/flow/v1.0/flows/getIndexEntities'} # type: ignore @distributed_trace def get_updated_entity_ids_for_workspace( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str body=None, # type: Optional["_models.UnversionedRebuildIndexDto"] **kwargs # type: Any ): # type: (...) -> "_models.UnversionedRebuildResponseDto" """get_updated_entity_ids_for_workspace. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param body: :type body: ~flow.models.UnversionedRebuildIndexDto :keyword callable cls: A custom type or function that will be passed the direct response :return: UnversionedRebuildResponseDto, or the result of cls(response) :rtype: ~flow.models.UnversionedRebuildResponseDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.UnversionedRebuildResponseDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'UnversionedRebuildIndexDto') else: _json = None request = build_get_updated_entity_ids_for_workspace_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, template_url=self.get_updated_entity_ids_for_workspace.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('UnversionedRebuildResponseDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_updated_entity_ids_for_workspace.metadata = {'url': '/flow/v1.0/flows/rebuildIndex'} # type: ignore
0
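The flows-provider operations above accept an optional request DTO whose fields are defined in ~flow.models rather than in this file, so the sketch below passes the body through opaquely. It is illustrative only; `provider_ops` is assumed to be a FlowsProviderOperations instance reached through the generated client, and `resolve_index_entities` is a hypothetical helper name.

# Illustrative sketch only. Assumes `provider_ops` is a FlowsProviderOperations
# instance and that the caller already holds a populated UnversionedEntityRequestDto.
def resolve_index_entities(provider_ops, subscription_id, resource_group_name, workspace_name, request_dto):
    """Hypothetical helper: look up index entities by id via the getIndexEntities route."""
    response_dto = provider_ops.get_index_entity_by_id(
        subscription_id=subscription_id,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        body=request_dto,  # serialized as 'UnversionedEntityRequestDto' when not None
    )
    # Returns an UnversionedEntityResponseDto, or the result of a custom `cls`
    # callback when one is supplied via kwargs.
    return response_dto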
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/__init__.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from ._bulk_runs_operations import BulkRunsOperations from ._connection_operations import ConnectionOperations from ._connections_operations import ConnectionsOperations from ._flow_runs_admin_operations import FlowRunsAdminOperations from ._flow_runtimes_operations import FlowRuntimesOperations from ._flow_runtimes_workspace_independent_operations import FlowRuntimesWorkspaceIndependentOperations from ._flows_operations import FlowsOperations from ._flow_sessions_operations import FlowSessionsOperations from ._flows_provider_operations import FlowsProviderOperations from ._tools_operations import ToolsOperations __all__ = [ 'BulkRunsOperations', 'ConnectionOperations', 'ConnectionsOperations', 'FlowRunsAdminOperations', 'FlowRuntimesOperations', 'FlowRuntimesWorkspaceIndependentOperations', 'FlowsOperations', 'FlowSessionsOperations', 'FlowsProviderOperations', 'ToolsOperations', ]
0
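The __init__.py above defines the public import surface of the generated operations subpackage. The sketch below only exercises those imports; per the class docstrings, the operation groups are normally reached as attributes of the service client rather than instantiated directly, and the helper name here is hypothetical.

# Import-surface sketch. The class names come from __all__ above; the package
# path follows from this file's location inside the installed promptflow package.
from promptflow.azure._restclient.flow.operations import (
    BulkRunsOperations,
    FlowSessionsOperations,
    ToolsOperations,
)


def describe_operation_groups():
    """Hypothetical helper: list a few of the operation-group classes this package exports."""
    return [cls.__name__ for cls in (BulkRunsOperations, FlowSessionsOperations, ToolsOperations)]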
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/_flow_sessions_admin_operations.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- # coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from msrest import Serializer from .. import models as _models from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Optional, TypeVar T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False # fmt: off def build_create_flow_session_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str session_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] waitfor_completion = kwargs.pop('waitfor_completion', False) # type: Optional[bool] accept = "text/plain, application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessionsAdmin/{sessionId}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "sessionId": _SERIALIZER.url("session_id", session_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if waitfor_completion is not None: query_parameters['waitforCompletion'] = _SERIALIZER.query("waitfor_completion", waitfor_completion, 'bool') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) # fmt: on class FlowSessionsAdminOperations(object): """FlowSessionsAdminOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
:param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace def create_flow_session( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str session_id, # type: str waitfor_completion=False, # type: Optional[bool] body=None, # type: Optional["_models.CreateFlowSessionRequest"] **kwargs # type: Any ): # type: (...) -> str """create_flow_session. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param session_id: :type session_id: str :param waitfor_completion: :type waitfor_completion: bool :param body: :type body: ~flow.models.CreateFlowSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateFlowSessionRequest') else: _json = None request = build_create_flow_session_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, session_id=session_id, content_type=content_type, json=_json, waitfor_completion=waitfor_completion, template_url=self.create_flow_session.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_flow_session.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessionsAdmin/{sessionId}'} # type: ignore
0
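create_flow_session in the admin operations above posts to the FlowSessionsAdmin/{sessionId} route, forwards waitfor_completion as the waitforCompletion query parameter, and returns a plain string. The sketch below is an assumption-laden illustration: `sessions_admin_ops` is presumed to be a FlowSessionsAdminOperations instance from the generated client, and `create_session_and_wait` is a hypothetical helper.

# Sketch only. Assumes `sessions_admin_ops` is a FlowSessionsAdminOperations
# instance reached through the generated service client (not shown here).
def create_session_and_wait(sessions_admin_ops, subscription_id, resource_group_name,
                            workspace_name, session_id, request_body=None):
    """Hypothetical helper: create a flow session and block until completion."""
    # waitfor_completion=True is sent as the `waitforCompletion` query parameter.
    result = sessions_admin_ops.create_flow_session(
        subscription_id=subscription_id,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        session_id=session_id,
        waitfor_completion=True,
        body=request_body,  # Optional CreateFlowSessionRequest; None omits the JSON body
    )
    return result  # plain `str` per the operation's declared return type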
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/_flow_sessions_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from msrest import Serializer from .. import models as _models from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False # fmt: off def build_create_flow_session_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str session_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/{sessionId}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "sessionId": _SERIALIZER.url("session_id", session_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_get_flow_session_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str session_id, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/{sessionId}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "sessionId": _SERIALIZER.url("session_id", session_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_delete_flow_session_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str session_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/{sessionId}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "sessionId": _SERIALIZER.url("session_id", session_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="DELETE", url=url, headers=header_parameters, **kwargs ) def build_poll_operation_status_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str session_id, # type: str action_type, # type: Union[str, "_models.SetupFlowSessionAction"] location, # type: str operation_id, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest api_version = kwargs.pop('api_version', "1.0.0") # type: Optional[str] type = kwargs.pop('type', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/{sessionId}/{actionType}/locations/{location}/operations/{operationId}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "sessionId": _SERIALIZER.url("session_id", session_id, 'str'), "actionType": _SERIALIZER.url("action_type", action_type, 'str'), "location": _SERIALIZER.url("location", location, 'str'), "operationId": _SERIALIZER.url("operation_id", operation_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if api_version is not None: query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') if type is not None: query_parameters['type'] = _SERIALIZER.query("type", type, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_standby_pools_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/standbypools') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) # fmt: on class FlowSessionsOperations(object): """FlowSessionsOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. 
""" models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace def create_flow_session( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str session_id, # type: str body=None, # type: Optional["_models.CreateFlowSessionRequest"] **kwargs # type: Any ): # type: (...) -> Any """create_flow_session. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param session_id: :type session_id: str :param body: :type body: ~flow.models.CreateFlowSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: any, or the result of cls(response) :rtype: any :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Any] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateFlowSessionRequest') else: _json = None request = build_create_flow_session_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, session_id=session_id, content_type=content_type, json=_json, template_url=self.create_flow_session.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) if response.status_code == 200: deserialized = self._deserialize('object', pipeline_response) if response.status_code == 202: deserialized = self._deserialize('object', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_flow_session.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/{sessionId}'} # type: ignore @distributed_trace def get_flow_session( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str session_id, # type: str **kwargs # type: Any ): # type: (...) -> "_models.GetTrainingSessionDto" """get_flow_session. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param session_id: :type session_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: GetTrainingSessionDto, or the result of cls(response) :rtype: ~flow.models.GetTrainingSessionDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.GetTrainingSessionDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_session_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, session_id=session_id, template_url=self.get_flow_session.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('GetTrainingSessionDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_session.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/{sessionId}'} # type: ignore @distributed_trace def delete_flow_session( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str session_id, # type: str **kwargs # type: Any ): # type: (...) -> Any """delete_flow_session. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param session_id: :type session_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: any, or the result of cls(response) :rtype: any :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Any] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_delete_flow_session_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, session_id=session_id, template_url=self.delete_flow_session.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) if response.status_code == 200: deserialized = self._deserialize('object', pipeline_response) if response.status_code == 202: deserialized = self._deserialize('object', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_flow_session.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/{sessionId}'} # type: ignore @distributed_trace def poll_operation_status( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str session_id, # type: str action_type, # type: Union[str, "_models.SetupFlowSessionAction"] location, # type: str operation_id, # type: str api_version="1.0.0", # type: Optional[str] type=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> Any """poll_operation_status. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param session_id: :type session_id: str :param action_type: :type action_type: str or ~flow.models.SetupFlowSessionAction :param location: :type location: str :param operation_id: :type operation_id: str :param api_version: Api Version. The default value is "1.0.0". 
:type api_version: str :param type: :type type: str :keyword callable cls: A custom type or function that will be passed the direct response :return: any, or the result of cls(response) :rtype: any :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Any] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_poll_operation_status_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, session_id=session_id, action_type=action_type, location=location, operation_id=operation_id, api_version=api_version, type=type, template_url=self.poll_operation_status.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('object', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized poll_operation_status.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/{sessionId}/{actionType}/locations/{location}/operations/{operationId}'} # type: ignore @distributed_trace def get_standby_pools( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> List["_models.StandbyPoolProperties"] """get_standby_pools. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of StandbyPoolProperties, or the result of cls(response) :rtype: list[~flow.models.StandbyPoolProperties] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.StandbyPoolProperties"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_standby_pools_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.get_standby_pools.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[StandbyPoolProperties]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_standby_pools.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/standbypools'} # type: ignore
0
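The session operations above cover the full lifecycle: create (200 or 202), get, poll operation status, delete, and standby-pool lookup. The sketch below shows one way a caller might distinguish a synchronous 200 from an asynchronous 202 by supplying the `cls` callback, which the generated code invokes as cls(pipeline_response, deserialized, response_headers). It is a sketch under assumptions: `sessions_ops` is presumed to be a FlowSessionsOperations instance, and the helper name is hypothetical.

# Sketch only. Assumes `sessions_ops` is a FlowSessionsOperations instance from
# the generated client.
def create_session_with_status(sessions_ops, subscription_id, resource_group_name,
                               workspace_name, session_id, request_body=None):
    """Hypothetical helper: create a session and report whether it returned 200 or 202."""
    def capture_status(pipeline_response, deserialized, _headers):
        # Matches the generated call site: cls(pipeline_response, deserialized, {}).
        return deserialized, pipeline_response.http_response.status_code

    payload, status_code = sessions_ops.create_flow_session(
        subscription_id=subscription_id,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        session_id=session_id,
        body=request_body,
        cls=capture_status,
    )
    # A 202 indicates the service accepted the request for asynchronous processing;
    # callers would then track progress via poll_operation_status.
    return payload, status_code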
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/_flows_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from msrest import Serializer from .. import models as _models from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False # fmt: off def build_create_flow_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] experiment_id = kwargs.pop('experiment_id', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if experiment_id is not None: query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_list_flows_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest experiment_id = kwargs.pop('experiment_id', None) # type: Optional[str] owned_only = kwargs.pop('owned_only', None) # type: Optional[bool] flow_type = kwargs.pop('flow_type', None) # type: Optional[Union[str, "_models.FlowType"]] list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if experiment_id is not None: query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str') if owned_only is not None: query_parameters['ownedOnly'] = _SERIALIZER.query("owned_only", owned_only, 'bool') if flow_type is not None: query_parameters['flowType'] = _SERIALIZER.query("flow_type", flow_type, 'str') if list_view_type is not None: query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_clone_flow_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] experiment_id = kwargs.pop('experiment_id') # type: str accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/clone') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_create_flow_from_sample_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] experiment_id = kwargs.pop('experiment_id', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/fromsample') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if experiment_id is not None: query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_update_flow_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] experiment_id = kwargs.pop('experiment_id') # type: str accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_patch_flow_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] experiment_id = kwargs.pop('experiment_id') # type: str accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PATCH", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_flow_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest experiment_id = kwargs.pop('experiment_id') # type: str accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_submit_flow_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] experiment_id = kwargs.pop('experiment_id') # type: str endpoint_name = kwargs.pop('endpoint_name', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/submit') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str') if endpoint_name is not None: query_parameters['endpointName'] = _SERIALIZER.query("endpoint_name", endpoint_name, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_flow_run_status_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest experiment_id = kwargs.pop('experiment_id', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/{flowRunId}/status') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if experiment_id is not None: query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_flow_run_info_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest experiment_id = kwargs.pop('experiment_id') # type: str accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_flow_child_runs_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest index = kwargs.pop('index', None) # type: Optional[int] start_index = kwargs.pop('start_index', None) # type: Optional[int] end_index = kwargs.pop('end_index', None) # type: Optional[int] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/childRuns') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if index is not None: query_parameters['index'] = _SERIALIZER.query("index", index, 'int') if start_index is not None: query_parameters['startIndex'] = _SERIALIZER.query("start_index", start_index, 'int') if end_index is not None: query_parameters['endIndex'] = _SERIALIZER.query("end_index", end_index, 'int') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_flow_node_runs_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str node_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest index = kwargs.pop('index', None) # type: Optional[int] start_index = kwargs.pop('start_index', None) # type: Optional[int] end_index = kwargs.pop('end_index', None) # type: Optional[int] aggregation = kwargs.pop('aggregation', False) # type: Optional[bool] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/nodeRuns/{nodeName}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), "nodeName": _SERIALIZER.url("node_name", node_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if index is not None: query_parameters['index'] = _SERIALIZER.query("index", index, 'int') if start_index is not None: query_parameters['startIndex'] = _SERIALIZER.query("start_index", start_index, 'int') if end_index is not None: query_parameters['endIndex'] = _SERIALIZER.query("end_index", end_index, 'int') if aggregation is not None: query_parameters['aggregation'] = _SERIALIZER.query("aggregation", aggregation, 'bool') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_flow_node_run_base_path_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str node_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/nodeRuns/{nodeName}/basePath') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), "nodeName": _SERIALIZER.url("node_name", node_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_clone_flow_from_flow_run_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] experiment_id = kwargs.pop('experiment_id') # type: str accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/clone') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_list_bulk_tests_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest experiment_id = kwargs.pop('experiment_id', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/bulkTests') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if experiment_id is not None: query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_bulk_test_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str bulk_test_id, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/bulkTests/{bulkTestId}')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
        "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'),
        "bulkTestId": _SERIALIZER.url("bulk_test_id", bulk_test_id, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        headers=header_parameters,
        **kwargs
    )


def build_get_samples_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    use_snapshot = kwargs.pop('use_snapshot', False)  # type: Optional[bool]

    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/samples')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    if use_snapshot is not None:
        query_parameters['useSnapshot'] = _SERIALIZER.query("use_snapshot", use_snapshot, 'bool')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
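# The request builders above only assemble an azure.core HttpRequest; they do not send it.
# A minimal usage sketch of calling one directly is shown below (in normal use they are
# invoked indirectly by the FlowsOperations methods defined later in this module). The
# subscription, resource group, and workspace values are placeholders, not real identifiers.
#
#     request = build_get_samples_request(
#         subscription_id="00000000-0000-0000-0000-000000000000",
#         resource_group_name="my-resource-group",
#         workspace_name="my-workspace",
#         use_snapshot=True,
#     )
#     assert request.method == "GET"
#     # The path parameters are already substituted by _format_url_section; the client
#     # later prepends its base endpoint via self._client.format_url(request.url) and
#     # sends the request through its HTTP pipeline.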
def build_get_evaluate_flow_samples_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    use_snapshot = kwargs.pop('use_snapshot', False)  # type: Optional[bool]

    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/evaluateSamples')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    if use_snapshot is not None:
        query_parameters['useSnapshot'] = _SERIALIZER.query("use_snapshot", use_snapshot, 'bool')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )


def build_get_flow_deploy_reserved_environment_variable_names_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/DeployReservedEnvironmentVariableNames')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        headers=header_parameters,
        **kwargs
    )


def build_deploy_flow_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...)
-> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] async_call = kwargs.pop('async_call', False) # type: Optional[bool] msi_token = kwargs.pop('msi_token', False) # type: Optional[bool] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/deploy') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if async_call is not None: query_parameters['asyncCall'] = _SERIALIZER.query("async_call", async_call, 'bool') if msi_token is not None: query_parameters['msiToken'] = _SERIALIZER.query("msi_token", msi_token, 'bool') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_flow_run_log_content_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/logContent') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_cancel_flow_run_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest accept = "text/plain, application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/runs/{flowRunId}/cancel') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_cancel_flow_test_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "text/plain, application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/flowTests/{flowRunId}/cancel') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_cancel_bulk_test_run_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str bulk_test_run_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "text/plain, application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/bulkTests/{bulkTestRunId}/cancel') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "bulkTestRunId": _SERIALIZER.url("bulk_test_run_id", bulk_test_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_get_flow_snapshot_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/FlowSnapshot') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_get_connection_override_settings_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] runtime_name = kwargs.pop('runtime_name', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/connectionOverride') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if runtime_name is not None: query_parameters['runtimeName'] = _SERIALIZER.query("runtime_name", runtime_name, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_flow_inputs_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/flowInputs') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_load_as_component_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/LoadAsComponent') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_get_flow_tools_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest experiment_id = kwargs.pop('experiment_id') # type: str flow_runtime_name = kwargs.pop('flow_runtime_name', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/flowTools') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if flow_runtime_name is not None: query_parameters['flowRuntimeName'] = _SERIALIZER.query("flow_runtime_name", flow_runtime_name, 'str') query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_setup_flow_session_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] experiment_id = kwargs.pop('experiment_id') # type: str accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/sessions') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_delete_flow_session_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest
    experiment_id = kwargs.pop('experiment_id')  # type: str

    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/sessions')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
        "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="DELETE",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )


def build_get_flow_session_status_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    flow_id,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    experiment_id = kwargs.pop('experiment_id')  # type: str

    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/sessions/status')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
        "flowId": _SERIALIZER.url("flow_id", flow_id, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['experimentId'] = _SERIALIZER.query("experiment_id", experiment_id, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )

# fmt: on

class FlowsOperations(object):
    """FlowsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~flow.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
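    # A minimal usage sketch (illustrative only): FlowsOperations is not constructed
    # directly; it is exposed as an attribute (assumed here to be ``flows``) of the
    # generated service client, assumed below to be AzureMachineLearningDesignerServiceClient.
    # The client constructor arguments, the CreateFlowRequest fields, and the workspace
    # identifiers are placeholders for illustration, not values taken from this module.
    #
    #     from azure.identity import DefaultAzureCredential
    #
    #     client = AzureMachineLearningDesignerServiceClient(credential=DefaultAzureCredential())
    #     flow_dto = client.flows.create_flow(
    #         subscription_id="00000000-0000-0000-0000-000000000000",
    #         resource_group_name="my-resource-group",
    #         workspace_name="my-workspace",
    #         body=_models.CreateFlowRequest(),
    #     )
    #
    # Each operation builds an HttpRequest with the corresponding ``build_*_request``
    # helper, runs it through ``self._client._pipeline``, and deserializes the JSON
    # response into the model named in its docstring (here ``FlowDto``).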
    @distributed_trace
    def create_flow(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        experiment_id=None,  # type: Optional[str]
        body=None,  # type: Optional["_models.CreateFlowRequest"]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.FlowDto"
        """create_flow.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param experiment_id:
        :type experiment_id: str
        :param body:
        :type body: ~flow.models.CreateFlowRequest
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: FlowDto, or the result of cls(response)
        :rtype: ~flow.models.FlowDto
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FlowDto"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        if body is not None:
            _json = self._serialize.body(body, 'CreateFlowRequest')
        else:
            _json = None

        request = build_create_flow_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            content_type=content_type,
            json=_json,
            experiment_id=experiment_id,
            template_url=self.create_flow.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('FlowDto', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    create_flow.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows'}  # type: ignore

    @distributed_trace
    def list_flows(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        experiment_id=None,  # type: Optional[str]
        owned_only=None,  # type: Optional[bool]
        flow_type=None,  # type: Optional[Union[str, "_models.FlowType"]]
        list_view_type=None,  # type: Optional[Union[str, "_models.ListViewType"]]
        **kwargs  # type: Any
    ):
        # type: (...) -> List["_models.FlowBaseDto"]
        """list_flows.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
:type workspace_name: str :param experiment_id: :type experiment_id: str :param owned_only: :type owned_only: bool :param flow_type: :type flow_type: str or ~flow.models.FlowType :param list_view_type: :type list_view_type: str or ~flow.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response :return: list of FlowBaseDto, or the result of cls(response) :rtype: list[~flow.models.FlowBaseDto] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.FlowBaseDto"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_flows_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, experiment_id=experiment_id, owned_only=owned_only, flow_type=flow_type, list_view_type=list_view_type, template_url=self.list_flows.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[FlowBaseDto]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list_flows.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows'} # type: ignore @distributed_trace def clone_flow( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str experiment_id, # type: str body=None, # type: Optional["_models.CreateFlowRequest"] **kwargs # type: Any ): # type: (...) -> "_models.FlowDto" """clone_flow. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param experiment_id: :type experiment_id: str :param body: :type body: ~flow.models.CreateFlowRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowDto, or the result of cls(response) :rtype: ~flow.models.FlowDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateFlowRequest') else: _json = None request = build_clone_flow_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, content_type=content_type, experiment_id=experiment_id, json=_json, template_url=self.clone_flow.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized clone_flow.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/clone'} # type: ignore @distributed_trace def create_flow_from_sample( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str experiment_id=None, # type: Optional[str] body=None, # type: Optional["_models.CreateFlowFromSampleRequest"] **kwargs # type: Any ): # type: (...) -> "_models.FlowDto" """create_flow_from_sample. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param experiment_id: :type experiment_id: str :param body: :type body: ~flow.models.CreateFlowFromSampleRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowDto, or the result of cls(response) :rtype: ~flow.models.FlowDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateFlowFromSampleRequest') else: _json = None request = build_create_flow_from_sample_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, experiment_id=experiment_id, template_url=self.create_flow_from_sample.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_flow_from_sample.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/fromsample'} # type: ignore @distributed_trace def update_flow( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str experiment_id, # type: str body=None, # type: Optional["_models.UpdateFlowRequest"] **kwargs # type: Any ): # type: (...) -> str """update_flow. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param experiment_id: :type experiment_id: str :param body: :type body: ~flow.models.UpdateFlowRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'UpdateFlowRequest') else: _json = None request = build_update_flow_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, content_type=content_type, experiment_id=experiment_id, json=_json, template_url=self.update_flow.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_flow.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}'} # type: ignore @distributed_trace def patch_flow( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str experiment_id, # type: str body=None, # type: Optional["_models.PatchFlowRequest"] **kwargs # type: Any ): # type: (...) -> str """patch_flow. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param experiment_id: :type experiment_id: str :param body: :type body: ~flow.models.PatchFlowRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json-patch+json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'PatchFlowRequest') else: _json = None request = build_patch_flow_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, content_type=content_type, experiment_id=experiment_id, json=_json, template_url=self.patch_flow.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized patch_flow.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}'} # type: ignore @distributed_trace def get_flow( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str experiment_id, # type: str **kwargs # type: Any ): # type: (...) -> "_models.FlowDto" """get_flow. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param experiment_id: :type experiment_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowDto, or the result of cls(response) :rtype: ~flow.models.FlowDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, experiment_id=experiment_id, template_url=self.get_flow.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}'} # type: ignore @distributed_trace def submit_flow( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str experiment_id, # type: str endpoint_name=None, # type: Optional[str] body=None, # type: Optional["_models.SubmitFlowRequest"] **kwargs # type: Any ): # type: (...) -> "_models.FlowRunResult" """submit_flow. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param experiment_id: :type experiment_id: str :param endpoint_name: :type endpoint_name: str :param body: :type body: ~flow.models.SubmitFlowRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRunResult, or the result of cls(response) :rtype: ~flow.models.FlowRunResult :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRunResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'SubmitFlowRequest') else: _json = None request = build_submit_flow_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, experiment_id=experiment_id, json=_json, endpoint_name=endpoint_name, template_url=self.submit_flow.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRunResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized submit_flow.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/submit'} # type: ignore @distributed_trace def get_flow_run_status( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str experiment_id=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> "_models.FlowRunResult" """get_flow_run_status. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param experiment_id: :type experiment_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRunResult, or the result of cls(response) :rtype: ~flow.models.FlowRunResult :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRunResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_run_status_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, experiment_id=experiment_id, template_url=self.get_flow_run_status.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRunResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_run_status.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/{flowRunId}/status'} # type: ignore @distributed_trace def get_flow_run_info( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str experiment_id, # type: str **kwargs # type: Any ): # type: (...) -> "_models.FlowRunInfo" """get_flow_run_info. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param experiment_id: :type experiment_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRunInfo, or the result of cls(response) :rtype: ~flow.models.FlowRunInfo :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRunInfo"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_run_info_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, experiment_id=experiment_id, template_url=self.get_flow_run_info.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRunInfo', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_run_info.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}'} # type: ignore @distributed_trace def get_flow_child_runs( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str index=None, # type: Optional[int] start_index=None, # type: Optional[int] end_index=None, # type: Optional[int] **kwargs # type: Any ): # type: (...) -> List[Any] """get_flow_child_runs. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param index: :type index: int :param start_index: :type start_index: int :param end_index: :type end_index: int :keyword callable cls: A custom type or function that will be passed the direct response :return: list of any, or the result of cls(response) :rtype: list[any] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List[Any]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_child_runs_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, index=index, start_index=start_index, end_index=end_index, template_url=self.get_flow_child_runs.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[object]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_child_runs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/childRuns'} # type: ignore @distributed_trace def get_flow_node_runs( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str node_name, # type: str index=None, # type: Optional[int] start_index=None, # type: Optional[int] end_index=None, # type: Optional[int] aggregation=False, # type: Optional[bool] **kwargs # type: Any ): # type: (...) -> List[Any] """get_flow_node_runs. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param node_name: :type node_name: str :param index: :type index: int :param start_index: :type start_index: int :param end_index: :type end_index: int :param aggregation: :type aggregation: bool :keyword callable cls: A custom type or function that will be passed the direct response :return: list of any, or the result of cls(response) :rtype: list[any] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List[Any]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_node_runs_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, node_name=node_name, index=index, start_index=start_index, end_index=end_index, aggregation=aggregation, template_url=self.get_flow_node_runs.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[object]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_node_runs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/nodeRuns/{nodeName}'} # type: ignore @distributed_trace def get_flow_node_run_base_path( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str node_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.FlowRunBasePath" """get_flow_node_run_base_path. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param node_name: :type node_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRunBasePath, or the result of cls(response) :rtype: ~flow.models.FlowRunBasePath :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRunBasePath"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_node_run_base_path_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, node_name=node_name, template_url=self.get_flow_node_run_base_path.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRunBasePath', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_node_run_base_path.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/nodeRuns/{nodeName}/basePath'} # type: ignore @distributed_trace def clone_flow_from_flow_run( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str experiment_id, # type: str body=None, # type: Optional["_models.CreateFlowRequest"] **kwargs # type: Any ): # type: (...) -> "_models.FlowDto" """clone_flow_from_flow_run. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param experiment_id: :type experiment_id: str :param body: :type body: ~flow.models.CreateFlowRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowDto, or the result of cls(response) :rtype: ~flow.models.FlowDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateFlowRequest') else: _json = None request = build_clone_flow_from_flow_run_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, content_type=content_type, experiment_id=experiment_id, json=_json, template_url=self.clone_flow_from_flow_run.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized clone_flow_from_flow_run.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/clone'} # type: ignore @distributed_trace def list_bulk_tests( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str experiment_id=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> List["_models.BulkTestDto"] """list_bulk_tests. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param experiment_id: :type experiment_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of BulkTestDto, or the result of cls(response) :rtype: list[~flow.models.BulkTestDto] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.BulkTestDto"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_bulk_tests_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, experiment_id=experiment_id, template_url=self.list_bulk_tests.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[BulkTestDto]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list_bulk_tests.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/bulkTests'} # type: ignore @distributed_trace def get_bulk_test( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str bulk_test_id, # type: str **kwargs # type: Any ): # type: (...) -> "_models.BulkTestDto" """get_bulk_test. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param bulk_test_id: :type bulk_test_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: BulkTestDto, or the result of cls(response) :rtype: ~flow.models.BulkTestDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.BulkTestDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_bulk_test_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, bulk_test_id=bulk_test_id, template_url=self.get_bulk_test.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('BulkTestDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_bulk_test.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/bulkTests/{bulkTestId}'} # type: ignore @distributed_trace def get_samples( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str use_snapshot=False, # type: Optional[bool] **kwargs # type: Any ): # type: (...) -> Dict[str, "_models.FlowSampleDto"] """get_samples. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param use_snapshot: :type use_snapshot: bool :keyword callable cls: A custom type or function that will be passed the direct response :return: dict mapping str to FlowSampleDto, or the result of cls(response) :rtype: dict[str, ~flow.models.FlowSampleDto] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, "_models.FlowSampleDto"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_samples_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, use_snapshot=use_snapshot, template_url=self.get_samples.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('{FlowSampleDto}', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_samples.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/samples'} # type: ignore @distributed_trace def get_evaluate_flow_samples( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str use_snapshot=False, # type: Optional[bool] **kwargs # type: Any ): # type: (...) -> Dict[str, "_models.FlowSampleDto"] """get_evaluate_flow_samples. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param use_snapshot: :type use_snapshot: bool :keyword callable cls: A custom type or function that will be passed the direct response :return: dict mapping str to FlowSampleDto, or the result of cls(response) :rtype: dict[str, ~flow.models.FlowSampleDto] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, "_models.FlowSampleDto"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_evaluate_flow_samples_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, use_snapshot=use_snapshot, template_url=self.get_evaluate_flow_samples.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('{FlowSampleDto}', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_evaluate_flow_samples.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/evaluateSamples'} # type: ignore @distributed_trace def get_flow_deploy_reserved_environment_variable_names( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> List[str] """get_flow_deploy_reserved_environment_variable_names. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of str, or the result of cls(response) :rtype: list[str] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List[str]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_deploy_reserved_environment_variable_names_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.get_flow_deploy_reserved_environment_variable_names.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[str]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_deploy_reserved_environment_variable_names.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/DeployReservedEnvironmentVariableNames'} # type: ignore @distributed_trace def deploy_flow( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str async_call=False, # type: Optional[bool] msi_token=False, # type: Optional[bool] body=None, # type: Optional["_models.DeployFlowRequest"] **kwargs # type: Any ): # type: (...) -> str """deploy_flow. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param async_call: :type async_call: bool :param msi_token: :type msi_token: bool :param body: :type body: ~flow.models.DeployFlowRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'DeployFlowRequest') else: _json = None request = build_deploy_flow_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, async_call=async_call, msi_token=msi_token, template_url=self.deploy_flow.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized deploy_flow.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/deploy'} # type: ignore @distributed_trace def get_flow_run_log_content( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) -> str """get_flow_run_log_content. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_run_log_content_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, template_url=self.get_flow_run_log_content.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_run_log_content.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/logContent'} # type: ignore @distributed_trace def cancel_flow_run( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) -> str """cancel_flow_run. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_run_id: :type flow_run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_cancel_flow_run_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_run_id=flow_run_id, template_url=self.cancel_flow_run.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized cancel_flow_run.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/runs/{flowRunId}/cancel'} # type: ignore @distributed_trace def cancel_flow_test( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) -> str """cancel_flow_test. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_cancel_flow_test_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, template_url=self.cancel_flow_test.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized cancel_flow_test.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/flowTests/{flowRunId}/cancel'} # type: ignore @distributed_trace def cancel_bulk_test_run( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str bulk_test_run_id, # type: str **kwargs # type: Any ): # type: (...) -> str """cancel_bulk_test_run. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param bulk_test_run_id: :type bulk_test_run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_cancel_bulk_test_run_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, bulk_test_run_id=bulk_test_run_id, template_url=self.cancel_bulk_test_run.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized cancel_bulk_test_run.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/bulkTests/{bulkTestRunId}/cancel'} # type: ignore @distributed_trace def get_flow_snapshot( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str body=None, # type: Optional["_models.CreateFlowRequest"] **kwargs # type: Any ): # type: (...) -> "_models.FlowSnapshot" """get_flow_snapshot. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param body: :type body: ~flow.models.CreateFlowRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowSnapshot, or the result of cls(response) :rtype: ~flow.models.FlowSnapshot :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowSnapshot"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateFlowRequest') else: _json = None request = build_get_flow_snapshot_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, template_url=self.get_flow_snapshot.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowSnapshot', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_snapshot.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/FlowSnapshot'} # type: ignore @distributed_trace def get_connection_override_settings( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str runtime_name=None, # type: Optional[str] body=None, # type: Optional["_models.FlowGraphReference"] **kwargs # type: Any ): # type: (...) -> List["_models.ConnectionOverrideSetting"] """get_connection_override_settings. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param runtime_name: :type runtime_name: str :param body: :type body: ~flow.models.FlowGraphReference :keyword callable cls: A custom type or function that will be passed the direct response :return: list of ConnectionOverrideSetting, or the result of cls(response) :rtype: list[~flow.models.ConnectionOverrideSetting] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.ConnectionOverrideSetting"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'FlowGraphReference') else: _json = None request = build_get_connection_override_settings_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, runtime_name=runtime_name, template_url=self.get_connection_override_settings.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[ConnectionOverrideSetting]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_connection_override_settings.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/connectionOverride'} # type: ignore @distributed_trace def get_flow_inputs( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str body=None, # type: Optional["_models.FlowGraphReference"] **kwargs # type: Any ): # type: (...) -> Dict[str, "_models.FlowInputDefinition"] """get_flow_inputs. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param body: :type body: ~flow.models.FlowGraphReference :keyword callable cls: A custom type or function that will be passed the direct response :return: dict mapping str to FlowInputDefinition, or the result of cls(response) :rtype: dict[str, ~flow.models.FlowInputDefinition] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, "_models.FlowInputDefinition"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'FlowGraphReference') else: _json = None request = build_get_flow_inputs_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, template_url=self.get_flow_inputs.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('{FlowInputDefinition}', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_inputs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/flowInputs'} # type: ignore @distributed_trace def load_as_component( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str body=None, # type: Optional["_models.LoadFlowAsComponentRequest"] **kwargs # type: Any ): # type: (...) -> str """load_as_component. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param body: :type body: ~flow.models.LoadFlowAsComponentRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'LoadFlowAsComponentRequest') else: _json = None request = build_load_as_component_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, template_url=self.load_as_component.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized load_as_component.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/LoadAsComponent'} # type: ignore @distributed_trace def get_flow_tools( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str experiment_id, # type: str flow_runtime_name=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> "_models.FlowToolsDto" """get_flow_tools. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param experiment_id: :type experiment_id: str :param flow_runtime_name: :type flow_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowToolsDto, or the result of cls(response) :rtype: ~flow.models.FlowToolsDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowToolsDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_tools_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, experiment_id=experiment_id, flow_runtime_name=flow_runtime_name, template_url=self.get_flow_tools.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowToolsDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_tools.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/flowTools'} # type: ignore @distributed_trace def setup_flow_session( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str experiment_id, # type: str body=None, # type: Optional["_models.SetupFlowSessionRequest"] **kwargs # type: Any ): # type: (...) -> Any """setup_flow_session. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param experiment_id: :type experiment_id: str :param body: :type body: ~flow.models.SetupFlowSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: any, or the result of cls(response) :rtype: any :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Any] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'SetupFlowSessionRequest') else: _json = None request = build_setup_flow_session_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, content_type=content_type, experiment_id=experiment_id, json=_json, template_url=self.setup_flow_session.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) if response.status_code == 200: deserialized = self._deserialize('object', pipeline_response) if response.status_code == 202: deserialized = self._deserialize('object', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized setup_flow_session.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/sessions'} # type: ignore @distributed_trace def delete_flow_session( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str experiment_id, # type: str **kwargs # type: Any ): # type: (...) -> Any """delete_flow_session. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param experiment_id: :type experiment_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: any, or the result of cls(response) :rtype: any :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Any] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_delete_flow_session_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, experiment_id=experiment_id, template_url=self.delete_flow_session.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) if response.status_code == 200: deserialized = self._deserialize('object', pipeline_response) if response.status_code == 202: deserialized = self._deserialize('object', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_flow_session.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/sessions'} # type: ignore @distributed_trace def get_flow_session_status( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_id, # type: str experiment_id, # type: str **kwargs # type: Any ): # type: (...) -> "_models.FlowSessionDto" """get_flow_session_status. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param experiment_id: :type experiment_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowSessionDto, or the result of cls(response) :rtype: ~flow.models.FlowSessionDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowSessionDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_session_status_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, experiment_id=experiment_id, template_url=self.get_flow_session_status.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowSessionDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_session_status.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/sessions/status'} # type: ignore
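The generated operations above all follow the same shape: build an HttpRequest from the path and query parameters, run it through the client pipeline, map non-200 status codes to exceptions, and deserialize the response body. A minimal usage sketch follows; how the client object is constructed and the `flows` attribute name it exposes are assumptions for illustration only, while the method names and keyword parameters are taken from the generated signatures above.

# Minimal usage sketch (not part of the generated file).
# Assumptions: `client` is an already-constructed service client for this REST API,
# and it exposes this operation group as `client.flows`; only the method names and
# parameter names below come from the generated code above.

def fetch_flow_run_info(client, subscription_id, resource_group_name, workspace_name,
                        flow_id, flow_run_id, experiment_id):
    # Returns a ~flow.models.FlowRunInfo on HTTP 200; raises HttpResponseError otherwise.
    return client.flows.get_flow_run_info(
        subscription_id=subscription_id,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        flow_id=flow_id,
        flow_run_id=flow_run_id,
        experiment_id=experiment_id,
    )


def cancel_flow_run(client, subscription_id, resource_group_name, workspace_name, flow_run_id):
    # The cancel endpoint returns a plain string payload.
    return client.flows.cancel_flow_run(
        subscription_id=subscription_id,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        flow_run_id=flow_run_id,
    )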
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/_flow_runtimes_workspace_independent_operations.py
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import TYPE_CHECKING
import warnings

from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from msrest import Serializer

from .. import models as _models
from .._vendor import _convert_request

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Callable, Dict, Generic, Optional, TypeVar
    T = TypeVar('T')
    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
# fmt: off

def build_get_runtime_latest_config_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/flow/api/runtimes/latestConfig')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        headers=header_parameters,
        **kwargs
    )

# fmt: on

class FlowRuntimesWorkspaceIndependentOperations(object):
    """FlowRuntimesWorkspaceIndependentOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~flow.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    @distributed_trace
    def get_runtime_latest_config(
        self,
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.RuntimeConfiguration"
        """get_runtime_latest_config.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: RuntimeConfiguration, or the result of cls(response)
        :rtype: ~flow.models.RuntimeConfiguration
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RuntimeConfiguration"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_get_runtime_latest_config_request(
            template_url=self.get_runtime_latest_config.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('RuntimeConfiguration', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_runtime_latest_config.metadata = {'url': '/flow/api/runtimes/latestConfig'}  # type: ignore
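get_runtime_latest_config takes no path or query parameters, so a call only needs the generated client. A minimal sketch follows under the same caveat as above: the attribute name used to reach this operation group is an illustrative assumption, not taken from the generated code.

# Minimal usage sketch (not part of the generated file).
# Assumption: the service client exposes this operation group as
# `client.flow_runtimes_workspace_independent`.
runtime_config = client.flow_runtimes_workspace_independent.get_runtime_latest_config()
print(runtime_config)  # a ~flow.models.RuntimeConfiguration instance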
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/_connections_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from msrest import Serializer from .. import models as _models from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False # fmt: off def build_create_connection_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_update_connection_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, headers=header_parameters, **kwargs ) def build_get_connection_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_delete_connection_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="DELETE", url=url, headers=header_parameters, **kwargs ) def build_get_connection_with_secrets_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}/listsecrets') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_list_connections_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_list_connection_specs_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/specs') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_list_azure_open_ai_deployments_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}/AzureOpenAIDeployments') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) # fmt: on class ConnectionsOperations(object): """ConnectionsOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace def create_connection( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str body=None, # type: Optional["_models.CreateOrUpdateConnectionRequestDto"] **kwargs # type: Any ): # type: (...) -> "_models.ConnectionDto" """create_connection. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param connection_name: :type connection_name: str :param body: :type body: ~flow.models.CreateOrUpdateConnectionRequestDto :keyword callable cls: A custom type or function that will be passed the direct response :return: ConnectionDto, or the result of cls(response) :rtype: ~flow.models.ConnectionDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateOrUpdateConnectionRequestDto') else: _json = None request = build_create_connection_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, content_type=content_type, json=_json, template_url=self.create_connection.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ConnectionDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}'} # type: ignore @distributed_trace def update_connection( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str body=None, # type: Optional["_models.CreateOrUpdateConnectionRequestDto"] **kwargs # type: Any ): # type: (...) -> "_models.ConnectionDto" """update_connection. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param connection_name: :type connection_name: str :param body: :type body: ~flow.models.CreateOrUpdateConnectionRequestDto :keyword callable cls: A custom type or function that will be passed the direct response :return: ConnectionDto, or the result of cls(response) :rtype: ~flow.models.ConnectionDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateOrUpdateConnectionRequestDto') else: _json = None request = build_update_connection_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, content_type=content_type, json=_json, template_url=self.update_connection.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ConnectionDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}'} # type: ignore @distributed_trace def get_connection( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.ConnectionDto" """get_connection. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param connection_name: :type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ConnectionDto, or the result of cls(response) :rtype: ~flow.models.ConnectionDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_connection_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, template_url=self.get_connection.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ConnectionDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}'} # type: ignore @distributed_trace def delete_connection( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.ConnectionDto" """delete_connection. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param connection_name: :type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ConnectionDto, or the result of cls(response) :rtype: ~flow.models.ConnectionDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_delete_connection_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, template_url=self.delete_connection.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ConnectionDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}'} # type: ignore @distributed_trace def get_connection_with_secrets( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.ConnectionDto" """get_connection_with_secrets. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param connection_name: :type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ConnectionDto, or the result of cls(response) :rtype: ~flow.models.ConnectionDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_connection_with_secrets_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, template_url=self.get_connection_with_secrets.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ConnectionDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_connection_with_secrets.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}/listsecrets'} # type: ignore @distributed_trace def list_connections( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> List["_models.ConnectionDto"] """list_connections. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of ConnectionDto, or the result of cls(response) :rtype: list[~flow.models.ConnectionDto] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.ConnectionDto"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_connections_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.list_connections.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[ConnectionDto]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list_connections.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections'} # type: ignore @distributed_trace def list_connection_specs( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> List["_models.WorkspaceConnectionSpec"] """list_connection_specs. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of WorkspaceConnectionSpec, or the result of cls(response) :rtype: list[~flow.models.WorkspaceConnectionSpec] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.WorkspaceConnectionSpec"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_connection_specs_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.list_connection_specs.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[WorkspaceConnectionSpec]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list_connection_specs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/specs'} # type: ignore @distributed_trace def list_azure_open_ai_deployments( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs # type: Any ): # type: (...) -> List["_models.AzureOpenAIDeploymentDto"] """list_azure_open_ai_deployments. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param connection_name: :type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of AzureOpenAIDeploymentDto, or the result of cls(response) :rtype: list[~flow.models.AzureOpenAIDeploymentDto] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.AzureOpenAIDeploymentDto"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_azure_open_ai_deployments_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, template_url=self.list_azure_open_ai_deployments.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[AzureOpenAIDeploymentDto]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list_azure_open_ai_deployments.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}/AzureOpenAIDeployments'} # type: ignore
0
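The generated `ConnectionsOperations` group above is not meant to be constructed by hand: the AutoRest service client wires in the pipeline, serializer, and deserializer and attaches the group as an attribute. The sketch below is a minimal, hypothetical illustration of driving it through such a client; the `client.connections` attribute name and the placeholder identifiers are assumptions for illustration and are not taken from the generated file.

```python
# Minimal sketch (not part of the generated module): list workspace connections and
# fetch one including its secrets through an already-constructed AutoRest client.
# The `client.connections` attribute name and the placeholder IDs are assumptions.

def show_connections(client, subscription_id, resource_group_name, workspace_name):
    # GET .../workspaces/{workspaceName}/Connections -> list of ConnectionDto
    connections = client.connections.list_connections(
        subscription_id=subscription_id,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
    )
    print(f"found {len(connections)} connection(s)")

    # GET .../Connections/{connectionName}/listsecrets -> ConnectionDto (includes secrets)
    return client.connections.get_connection_with_secrets(
        subscription_id=subscription_id,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        connection_name="my_azure_open_ai_connection",  # placeholder connection name
    )
```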
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/_connection_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from msrest import Serializer from .. import models as _models from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False # fmt: off def build_create_connection_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/{connectionName}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_update_connection_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/{connectionName}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, headers=header_parameters, **kwargs ) def build_get_connection_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/{connectionName}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_delete_connection_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest connection_scope = kwargs.pop('connection_scope', None) # type: Optional[Union[str, "_models.ConnectionScope"]] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/{connectionName}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if connection_scope is not None: query_parameters['connectionScope'] = _SERIALIZER.query("connection_scope", connection_scope, 'str') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="DELETE", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_list_connections_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_list_connection_specs_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/specs') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) # fmt: on class ConnectionOperations(object): """ConnectionOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. 
:ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace def create_connection( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str body=None, # type: Optional["_models.CreateOrUpdateConnectionRequest"] **kwargs # type: Any ): # type: (...) -> "_models.ConnectionEntity" """create_connection. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param connection_name: :type connection_name: str :param body: :type body: ~flow.models.CreateOrUpdateConnectionRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ConnectionEntity, or the result of cls(response) :rtype: ~flow.models.ConnectionEntity :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionEntity"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateOrUpdateConnectionRequest') else: _json = None request = build_create_connection_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, content_type=content_type, json=_json, template_url=self.create_connection.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ConnectionEntity', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/{connectionName}'} # type: ignore @distributed_trace def update_connection( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str body=None, # type: Optional["_models.CreateOrUpdateConnectionRequest"] **kwargs # type: Any ): # type: (...) -> "_models.ConnectionEntity" """update_connection. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. 
:type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param connection_name: :type connection_name: str :param body: :type body: ~flow.models.CreateOrUpdateConnectionRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ConnectionEntity, or the result of cls(response) :rtype: ~flow.models.ConnectionEntity :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionEntity"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateOrUpdateConnectionRequest') else: _json = None request = build_update_connection_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, content_type=content_type, json=_json, template_url=self.update_connection.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ConnectionEntity', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/{connectionName}'} # type: ignore @distributed_trace def get_connection( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.ConnectionEntity" """get_connection. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param connection_name: :type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ConnectionEntity, or the result of cls(response) :rtype: ~flow.models.ConnectionEntity :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionEntity"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_connection_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, template_url=self.get_connection.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ConnectionEntity', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/{connectionName}'} # type: ignore @distributed_trace def delete_connection( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str connection_name, # type: str connection_scope=None, # type: Optional[Union[str, "_models.ConnectionScope"]] **kwargs # type: Any ): # type: (...) -> "_models.ConnectionEntity" """delete_connection. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param connection_name: :type connection_name: str :param connection_scope: :type connection_scope: str or ~flow.models.ConnectionScope :keyword callable cls: A custom type or function that will be passed the direct response :return: ConnectionEntity, or the result of cls(response) :rtype: ~flow.models.ConnectionEntity :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionEntity"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_delete_connection_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, connection_scope=connection_scope, template_url=self.delete_connection.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ConnectionEntity', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/{connectionName}'} # type: ignore @distributed_trace def list_connections( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> List["_models.ConnectionEntity"] """list_connections. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of ConnectionEntity, or the result of cls(response) :rtype: list[~flow.models.ConnectionEntity] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.ConnectionEntity"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_connections_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.list_connections.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[ConnectionEntity]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list_connections.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection'} # type: ignore @distributed_trace def list_connection_specs( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> List["_models.ConnectionSpec"] """list_connection_specs. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of ConnectionSpec, or the result of cls(response) :rtype: list[~flow.models.ConnectionSpec] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.ConnectionSpec"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_connection_specs_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.list_connection_specs.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[ConnectionSpec]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list_connection_specs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/specs'} # type: ignore
0
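This older `ConnectionOperations` group targets the singular `/Connection/{connectionName}` routes and works with the `ConnectionEntity` / `CreateOrUpdateConnectionRequest` models; unlike the newer group above, its `delete_connection` also accepts an optional `connection_scope` that is forwarded as the `connectionScope` query parameter. Below is a minimal sketch of the request builder, assuming the module is importable from the file path listed above; all placeholder values are illustrative only.

```python
# Minimal sketch, assuming the generated module is importable from the path shown
# above; it only demonstrates how the builder assembles the DELETE request and
# forwards the optional connection_scope as the connectionScope query parameter.
from promptflow.azure._restclient.flow.operations._connection_operations import (
    build_delete_connection_request,
)

request = build_delete_connection_request(
    subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    resource_group_name="my-resource-group",                 # placeholder
    workspace_name="my-workspace",                           # placeholder
    connection_name="my_connection",                         # placeholder
    connection_scope="<ConnectionScope value>",              # optional; serialized as ?connectionScope=...
)
print(request.method)  # DELETE
print(request.url)     # .../workspaces/my-workspace/Connection/my_connection?connectionScope=...
```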
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/_bulk_runs_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from msrest import Serializer from .. import models as _models from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False # fmt: off def build_submit_bulk_run_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/submit') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_cancel_flow_run_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest accept = "text/plain, application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/cancel') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_clone_flow_from_flow_run_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest content_type = kwargs.pop('content_type', None) # type: Optional[str] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/clone') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_get_flow_run_info_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_get_flow_child_runs_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) 
-> HttpRequest index = kwargs.pop('index', None) # type: Optional[int] start_index = kwargs.pop('start_index', None) # type: Optional[int] end_index = kwargs.pop('end_index', None) # type: Optional[int] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/childRuns') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if index is not None: query_parameters['index'] = _SERIALIZER.query("index", index, 'int') if start_index is not None: query_parameters['startIndex'] = _SERIALIZER.query("start_index", start_index, 'int') if end_index is not None: query_parameters['endIndex'] = _SERIALIZER.query("end_index", end_index, 'int') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def build_get_flow_node_runs_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str node_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest index = kwargs.pop('index', None) # type: Optional[int] start_index = kwargs.pop('start_index', None) # type: Optional[int] end_index = kwargs.pop('end_index', None) # type: Optional[int] aggregation = kwargs.pop('aggregation', False) # type: Optional[bool] accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/nodeRuns/{nodeName}') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), "nodeName": _SERIALIZER.url("node_name", node_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct parameters query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if index is not None: query_parameters['index'] = _SERIALIZER.query("index", index, 'int') if start_index is not None: query_parameters['startIndex'] = _SERIALIZER.query("start_index", start_index, 'int') if end_index is not None: query_parameters['endIndex'] = _SERIALIZER.query("end_index", end_index, 'int') if aggregation is not None: query_parameters['aggregation'] = _SERIALIZER.query("aggregation", aggregation, 'bool') # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs ) def 
build_get_flow_node_run_base_path_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str node_name, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/nodeRuns/{nodeName}/basePath') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), "nodeName": _SERIALIZER.url("node_name", node_name, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_get_flow_run_log_content_request( subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) -> HttpRequest accept = "application/json" # Construct URL url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/logContent') path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), "flowRunId": _SERIALIZER.url("flow_run_id", flow_run_id, 'str'), } url = _format_url_section(url, **path_format_arguments) # Construct headers header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) # fmt: on class BulkRunsOperations(object): """BulkRunsOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace def submit_bulk_run( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str body=None, # type: Optional["_models.SubmitBulkRunRequest"] **kwargs # type: Any ): # type: (...) -> str """submit_bulk_run. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param body: :type body: ~flow.models.SubmitBulkRunRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'SubmitBulkRunRequest') else: _json = None request = build_submit_bulk_run_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, template_url=self.submit_bulk_run.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) if response.status_code == 200: deserialized = self._deserialize('str', pipeline_response) if response.status_code == 202: deserialized = self._deserialize('str', pipeline_response) if response.status_code == 204: deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized submit_bulk_run.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/submit'} # type: ignore @distributed_trace def cancel_flow_run( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) -> str """cancel_flow_run. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_run_id: :type flow_run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_cancel_flow_run_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_run_id=flow_run_id, template_url=self.cancel_flow_run.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized cancel_flow_run.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/cancel'} # type: ignore @distributed_trace def clone_flow_from_flow_run( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str body=None, # type: Optional["_models.CreateFlowRequest"] **kwargs # type: Any ): # type: (...) -> "_models.FlowDto" """clone_flow_from_flow_run. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_run_id: :type flow_run_id: str :param body: :type body: ~flow.models.CreateFlowRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowDto, or the result of cls(response) :rtype: ~flow.models.FlowDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateFlowRequest') else: _json = None request = build_clone_flow_from_flow_run_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_run_id=flow_run_id, content_type=content_type, json=_json, template_url=self.clone_flow_from_flow_run.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized clone_flow_from_flow_run.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/clone'} # type: ignore @distributed_trace def get_flow_run_info( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) -> "_models.FlowRunInfo" """get_flow_run_info. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_run_id: :type flow_run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRunInfo, or the result of cls(response) :rtype: ~flow.models.FlowRunInfo :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRunInfo"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_run_info_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_run_id=flow_run_id, template_url=self.get_flow_run_info.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRunInfo', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_run_info.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}'} # type: ignore @distributed_trace def get_flow_child_runs( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str index=None, # type: Optional[int] start_index=None, # type: Optional[int] end_index=None, # type: Optional[int] **kwargs # type: Any ): # type: (...) -> List[Any] """get_flow_child_runs. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_run_id: :type flow_run_id: str :param index: :type index: int :param start_index: :type start_index: int :param end_index: :type end_index: int :keyword callable cls: A custom type or function that will be passed the direct response :return: list of any, or the result of cls(response) :rtype: list[any] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List[Any]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_child_runs_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_run_id=flow_run_id, index=index, start_index=start_index, end_index=end_index, template_url=self.get_flow_child_runs.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[object]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_child_runs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/childRuns'} # type: ignore @distributed_trace def get_flow_node_runs( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str node_name, # type: str index=None, # type: Optional[int] start_index=None, # type: Optional[int] end_index=None, # type: Optional[int] aggregation=False, # type: Optional[bool] **kwargs # type: Any ): # type: (...) -> List[Any] """get_flow_node_runs. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_run_id: :type flow_run_id: str :param node_name: :type node_name: str :param index: :type index: int :param start_index: :type start_index: int :param end_index: :type end_index: int :param aggregation: :type aggregation: bool :keyword callable cls: A custom type or function that will be passed the direct response :return: list of any, or the result of cls(response) :rtype: list[any] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List[Any]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_node_runs_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_run_id=flow_run_id, node_name=node_name, index=index, start_index=start_index, end_index=end_index, aggregation=aggregation, template_url=self.get_flow_node_runs.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[object]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_node_runs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/nodeRuns/{nodeName}'} # type: ignore @distributed_trace def get_flow_node_run_base_path( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str node_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.FlowRunBasePath" """get_flow_node_run_base_path. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_run_id: :type flow_run_id: str :param node_name: :type node_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRunBasePath, or the result of cls(response) :rtype: ~flow.models.FlowRunBasePath :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRunBasePath"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_node_run_base_path_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_run_id=flow_run_id, node_name=node_name, template_url=self.get_flow_node_run_base_path.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRunBasePath', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_node_run_base_path.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/nodeRuns/{nodeName}/basePath'} # type: ignore @distributed_trace def get_flow_run_log_content( self, subscription_id, # type: str resource_group_name, # type: str workspace_name, # type: str flow_run_id, # type: str **kwargs # type: Any ): # type: (...) -> str """get_flow_run_log_content. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_run_id: :type flow_run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_run_log_content_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_run_id=flow_run_id, template_url=self.get_flow_run_log_content.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_run_log_content.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/logContent'} # type: ignore
0
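The build_* helpers above only assemble azure.core HttpRequest objects; the BulkRunsOperations methods then push them through the client pipeline and deserialize the response. A minimal sketch of calling one builder directly follows; the module path is assumed from the package layout used by the aio files below, and the identifiers are placeholders rather than values taken from this file:

from promptflow.azure._restclient.flow.operations._bulk_runs_operations import (
    build_get_flow_node_runs_request,
)

# Assemble the GET request for the node runs of a bulk run; nothing is sent yet.
request = build_get_flow_node_runs_request(
    subscription_id="<subscription-id>",
    resource_group_name="<resource-group>",
    workspace_name="<workspace-name>",
    flow_run_id="<bulk-run-id>",
    node_name="<node-name>",
    start_index=0,
    end_index=24,
    aggregation=False,
)
print(request.method, request.url)  # GET .../BulkRuns/<bulk-run-id>/nodeRuns/<node-name>?startIndex=0&endIndex=24&aggregation=false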
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/__init__.py
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from ._azure_machine_learning_designer_service_client import AzureMachineLearningDesignerServiceClient
__all__ = ['AzureMachineLearningDesignerServiceClient']

# `._patch.py` is used for handwritten extensions to the generated code
# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
from ._patch import patch_sdk
patch_sdk()
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/_azure_machine_learning_designer_service_client.py
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from copy import deepcopy
from typing import Any, Awaitable, Optional

from azure.core import AsyncPipelineClient
from azure.core.rest import AsyncHttpResponse, HttpRequest
from msrest import Deserializer, Serializer

from .. import models
from ._configuration import AzureMachineLearningDesignerServiceClientConfiguration
from .operations import BulkRunsOperations, ConnectionOperations, ConnectionsOperations, FlowRunsAdminOperations, FlowRuntimesOperations, FlowRuntimesWorkspaceIndependentOperations, FlowSessionsOperations, FlowsOperations, FlowsProviderOperations, ToolsOperations


class AzureMachineLearningDesignerServiceClient:
    """AzureMachineLearningDesignerServiceClient.

    :ivar bulk_runs: BulkRunsOperations operations
    :vartype bulk_runs: flow.aio.operations.BulkRunsOperations
    :ivar connection: ConnectionOperations operations
    :vartype connection: flow.aio.operations.ConnectionOperations
    :ivar connections: ConnectionsOperations operations
    :vartype connections: flow.aio.operations.ConnectionsOperations
    :ivar flow_runs_admin: FlowRunsAdminOperations operations
    :vartype flow_runs_admin: flow.aio.operations.FlowRunsAdminOperations
    :ivar flow_runtimes: FlowRuntimesOperations operations
    :vartype flow_runtimes: flow.aio.operations.FlowRuntimesOperations
    :ivar flow_runtimes_workspace_independent: FlowRuntimesWorkspaceIndependentOperations operations
    :vartype flow_runtimes_workspace_independent: flow.aio.operations.FlowRuntimesWorkspaceIndependentOperations
    :ivar flows: FlowsOperations operations
    :vartype flows: flow.aio.operations.FlowsOperations
    :ivar flow_sessions: FlowSessionsOperations operations
    :vartype flow_sessions: flow.aio.operations.FlowSessionsOperations
    :ivar flows_provider: FlowsProviderOperations operations
    :vartype flows_provider: flow.aio.operations.FlowsProviderOperations
    :ivar tools: ToolsOperations operations
    :vartype tools: flow.aio.operations.ToolsOperations
    :param base_url: Service URL. Default value is ''.
    :type base_url: str
    :param api_version: Api Version. The default value is "1.0.0".
    :type api_version: str
    """

    def __init__(
        self,
        base_url: str = "",
        api_version: Optional[str] = "1.0.0",
        **kwargs: Any
    ) -> None:
        self._config = AzureMachineLearningDesignerServiceClientConfiguration(api_version=api_version, **kwargs)
        self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs)

        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)
        self._serialize.client_side_validation = False
        self.bulk_runs = BulkRunsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.connection = ConnectionOperations(self._client, self._config, self._serialize, self._deserialize)
        self.connections = ConnectionsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.flow_runs_admin = FlowRunsAdminOperations(self._client, self._config, self._serialize, self._deserialize)
        self.flow_runtimes = FlowRuntimesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.flow_runtimes_workspace_independent = FlowRuntimesWorkspaceIndependentOperations(self._client, self._config, self._serialize, self._deserialize)
        self.flows = FlowsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.flow_sessions = FlowSessionsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.flows_provider = FlowsProviderOperations(self._client, self._config, self._serialize, self._deserialize)
        self.tools = ToolsOperations(self._client, self._config, self._serialize, self._deserialize)

    def _send_request(
        self,
        request: HttpRequest,
        **kwargs: Any
    ) -> Awaitable[AsyncHttpResponse]:
        """Runs the network request through the client's chained policies.

        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")
        <HttpRequest [GET], url: 'https://www.example.org/'>
        >>> response = await client._send_request(request)
        <AsyncHttpResponse: 200 OK>

        For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart

        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.AsyncHttpResponse
        """

        request_copy = deepcopy(request)
        request_copy.url = self._client.format_url(request_copy.url)
        return self._client.send_request(request_copy, **kwargs)

    async def close(self) -> None:
        await self._client.close()

    async def __aenter__(self) -> "AzureMachineLearningDesignerServiceClient":
        await self._client.__aenter__()
        return self

    async def __aexit__(self, *exc_details) -> None:
        await self._client.__aexit__(*exc_details)
0
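A minimal usage sketch for the async client, assuming the aio operation groups mirror the synchronous signatures shown earlier in this package; the endpoint, credential wiring, and identifiers below are placeholders rather than values taken from these files:

import asyncio

from promptflow.azure._restclient.flow.aio import AzureMachineLearningDesignerServiceClient


async def main() -> None:
    # Placeholder base_url; authentication is normally injected through the pipeline
    # policies (see the authentication_policy kwarg read by the configuration below).
    client = AzureMachineLearningDesignerServiceClient(base_url="https://<region>.api.azureml.ms")
    async with client:
        run_info = await client.bulk_runs.get_flow_run_info(
            subscription_id="<subscription-id>",
            resource_group_name="<resource-group>",
            workspace_name="<workspace-name>",
            flow_run_id="<bulk-run-id>",
        )
        print(run_info)


asyncio.run(main())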
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/_patch.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------

# coding=utf-8
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# --------------------------------------------------------------------------

# This file is used for handwritten extensions to the generated code. Example:
# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md


def patch_sdk():
    pass
0
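The _patch.py hook is the supported place for handwritten additions that survive regeneration. The snippet below is purely hypothetical and only illustrates the mechanism; no such helper exists in this package:

# _patch.py (hypothetical content, not part of the generated package)
from ._azure_machine_learning_designer_service_client import AzureMachineLearningDesignerServiceClient


def patch_sdk():
    def workspace_scope(self, subscription_id: str, resource_group_name: str, workspace_name: str) -> dict:
        """Bundle the three identifiers that every operation in this client expects."""
        return {
            "subscription_id": subscription_id,
            "resource_group_name": resource_group_name,
            "workspace_name": workspace_name,
        }

    # Attach the helper to the generated client class.
    AzureMachineLearningDesignerServiceClient.workspace_scope = workspace_scope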
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/_configuration.py
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from typing import Any, Optional

from azure.core.configuration import Configuration
from azure.core.pipeline import policies

VERSION = "unknown"


class AzureMachineLearningDesignerServiceClientConfiguration(Configuration):
    """Configuration for AzureMachineLearningDesignerServiceClient.

    Note that all parameters used to create this instance are saved as instance attributes.

    :param api_version: Api Version. The default value is "1.0.0".
    :type api_version: str
    """

    def __init__(
        self,
        api_version: Optional[str] = "1.0.0",
        **kwargs: Any
    ) -> None:
        super(AzureMachineLearningDesignerServiceClientConfiguration, self).__init__(**kwargs)

        self.api_version = api_version
        kwargs.setdefault('sdk_moniker', 'azuremachinelearningdesignerserviceclient/{}'.format(VERSION))
        self._configure(**kwargs)

    def _configure(
        self,
        **kwargs: Any
    ) -> None:
        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get('authentication_policy')
0
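Every policy read in _configure() can be overridden by passing the corresponding keyword argument at client construction time, since the client forwards **kwargs to this configuration; anything not supplied falls back to the azure-core defaults shown above. A small sketch, with a placeholder base_url:

from azure.core.pipeline import policies

from promptflow.azure._restclient.flow.aio import AzureMachineLearningDesignerServiceClient

client = AzureMachineLearningDesignerServiceClient(
    base_url="https://<region>.api.azureml.ms",
    retry_policy=policies.AsyncRetryPolicy(retry_total=3),
    logging_policy=policies.NetworkTraceLoggingPolicy(logging_enable=True),
)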
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_flow_runs_admin_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime import functools from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from ... import models as _models from ..._vendor import _convert_request from ...operations._flow_runs_admin_operations import build_batch_update_service_logs_request, build_check_policy_validation_async_request, build_get_storage_info_request, build_log_flow_run_event_request, build_log_flow_run_event_v2_request, build_log_flow_run_terminated_event_request, build_log_result_for_bulk_run_request, build_send_policy_validation_async_request, build_submit_bulk_run_async_request, build_update_service_logs_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class FlowRunsAdminOperations: """FlowRunsAdminOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace_async async def submit_bulk_run_async( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, bulk_run_id: str, error_handling_mode: Optional[Union[str, "_models.ErrorHandlingMode"]] = None, **kwargs: Any ) -> "_models.SubmitBulkRunResponse": """submit_bulk_run_async. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param bulk_run_id: :type bulk_run_id: str :param error_handling_mode: :type error_handling_mode: str or ~flow.models.ErrorHandlingMode :keyword callable cls: A custom type or function that will be passed the direct response :return: SubmitBulkRunResponse, or the result of cls(response) :rtype: ~flow.models.SubmitBulkRunResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.SubmitBulkRunResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_submit_bulk_run_async_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, bulk_run_id=bulk_run_id, error_handling_mode=error_handling_mode, template_url=self.submit_bulk_run_async.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('SubmitBulkRunResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized submit_bulk_run_async.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/submit'} # type: ignore @distributed_trace_async async def send_policy_validation_async( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, bulk_run_id: str, **kwargs: Any ) -> "_models.PolicyValidationResponse": """send_policy_validation_async. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param bulk_run_id: :type bulk_run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PolicyValidationResponse, or the result of cls(response) :rtype: ~flow.models.PolicyValidationResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyValidationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_send_policy_validation_async_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, bulk_run_id=bulk_run_id, template_url=self.send_policy_validation_async.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('PolicyValidationResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized send_policy_validation_async.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/policy'} # type: ignore @distributed_trace_async async def check_policy_validation_async( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, bulk_run_id: str, **kwargs: Any ) -> "_models.PolicyValidationResponse": """check_policy_validation_async. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param bulk_run_id: :type bulk_run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PolicyValidationResponse, or the result of cls(response) :rtype: ~flow.models.PolicyValidationResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyValidationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_check_policy_validation_async_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, bulk_run_id=bulk_run_id, template_url=self.check_policy_validation_async.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('PolicyValidationResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized check_policy_validation_async.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/policy'} # type: ignore @distributed_trace_async async def log_result_for_bulk_run( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, bulk_run_id: str, **kwargs: Any ) -> List["_models.KeyValuePairStringObject"]: """log_result_for_bulk_run. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param bulk_run_id: :type bulk_run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of KeyValuePairStringObject, or the result of cls(response) :rtype: list[~flow.models.KeyValuePairStringObject] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.KeyValuePairStringObject"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_log_result_for_bulk_run_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, bulk_run_id=bulk_run_id, template_url=self.log_result_for_bulk_run.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[KeyValuePairStringObject]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized log_result_for_bulk_run.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/LogResult'} # type: ignore @distributed_trace_async async def get_storage_info( self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> "_models.StorageInfo": """get_storage_info. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: StorageInfo, or the result of cls(response) :rtype: ~flow.models.StorageInfo :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageInfo"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_storage_info_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.get_storage_info.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('StorageInfo', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_storage_info.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/storageInfo'} # type: ignore @distributed_trace_async async def log_flow_run_event( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, flow_run_id: str, runtime_version: str, **kwargs: Any ) -> str: """log_flow_run_event. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param runtime_version: :type runtime_version: str :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_log_flow_run_event_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, runtime_version=runtime_version, template_url=self.log_flow_run_event.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized log_flow_run_event.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/flowRuns/{flowRunId}/runtime/{runtimeVersion}/logEvent'} # type: ignore @distributed_trace_async async def log_flow_run_event_v2( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, flow_run_id: str, runtime_version: Optional[str] = None, **kwargs: Any ) -> str: """log_flow_run_event_v2. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param runtime_version: :type runtime_version: str :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_log_flow_run_event_v2_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, runtime_version=runtime_version, template_url=self.log_flow_run_event_v2.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized log_flow_run_event_v2.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/flowRuns/{flowRunId}/logEvent'} # type: ignore @distributed_trace_async async def log_flow_run_terminated_event( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, flow_run_id: str, last_checked_time: Optional[datetime.datetime] = None, **kwargs: Any ) -> "_models.LogRunTerminatedEventDto": """log_flow_run_terminated_event. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param last_checked_time: :type last_checked_time: ~datetime.datetime :keyword callable cls: A custom type or function that will be passed the direct response :return: LogRunTerminatedEventDto, or the result of cls(response) :rtype: ~flow.models.LogRunTerminatedEventDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.LogRunTerminatedEventDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_log_flow_run_terminated_event_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, last_checked_time=last_checked_time, template_url=self.log_flow_run_terminated_event.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('LogRunTerminatedEventDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized log_flow_run_terminated_event.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/flowRuns/{flowRunId}/logTerminatedEvent'} # type: ignore @distributed_trace_async async def update_service_logs( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, bulk_run_id: str, body: Optional["_models.ServiceLogRequest"] = None, **kwargs: Any ) -> "_models.Task": """update_service_logs. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param bulk_run_id: :type bulk_run_id: str :param body: :type body: ~flow.models.ServiceLogRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: Task, or the result of cls(response) :rtype: ~flow.models.Task :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.Task"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'ServiceLogRequest') else: _json = None request = build_update_service_logs_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, bulk_run_id=bulk_run_id, content_type=content_type, json=_json, template_url=self.update_service_logs.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('Task', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_service_logs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/serviceLogs'} # type: ignore @distributed_trace_async async def batch_update_service_logs( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, bulk_run_id: str, body: Optional[List["_models.ServiceLogRequest"]] = None, **kwargs: Any ) -> "_models.Task": """batch_update_service_logs. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param bulk_run_id: :type bulk_run_id: str :param body: :type body: list[~flow.models.ServiceLogRequest] :keyword callable cls: A custom type or function that will be passed the direct response :return: Task, or the result of cls(response) :rtype: ~flow.models.Task :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.Task"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, '[ServiceLogRequest]') else: _json = None request = build_batch_update_service_logs_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, bulk_run_id=bulk_run_id, content_type=content_type, json=_json, template_url=self.batch_update_service_logs.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('Task', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized batch_update_service_logs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRunsAdmin/{flowId}/bulkRuns/{bulkRunId}/serviceLogs/batch'} # type: ignore
0
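The admin operations follow the same pattern as the other aio groups: build the request, run it through the pipeline, raise HttpResponseError with the deserialized ErrorResponse on a non-200 status, and otherwise return the typed model. A small sketch of calling get_storage_info, assuming a client constructed as in the aio client example above and placeholder identifiers:

from promptflow.azure._restclient.flow.aio import AzureMachineLearningDesignerServiceClient


async def fetch_storage_info(client: AzureMachineLearningDesignerServiceClient):
    # Returns a flow.models.StorageInfo on success; raises HttpResponseError otherwise.
    return await client.flow_runs_admin.get_storage_info(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace-name>",
    )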
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_flow_runtimes_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from ... import models as _models from ..._vendor import _convert_request from ...operations._flow_runtimes_operations import build_check_ci_availability_request, build_check_mir_availability_request, build_check_runtime_upgrade_request, build_create_runtime_request, build_delete_runtime_request, build_get_runtime_capability_request, build_get_runtime_latest_config_request, build_get_runtime_request, build_list_runtimes_request, build_update_runtime_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class FlowRuntimesOperations: """FlowRuntimesOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace_async async def create_runtime( self, subscription_id: str, resource_group_name: str, workspace_name: str, runtime_name: str, async_call: Optional[bool] = False, msi_token: Optional[bool] = False, skip_port_check: Optional[bool] = False, body: Optional["_models.CreateFlowRuntimeRequest"] = None, **kwargs: Any ) -> "_models.FlowRuntimeDto": """create_runtime. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param runtime_name: :type runtime_name: str :param async_call: :type async_call: bool :param msi_token: :type msi_token: bool :param skip_port_check: :type skip_port_check: bool :param body: :type body: ~flow.models.CreateFlowRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRuntimeDto, or the result of cls(response) :rtype: ~flow.models.FlowRuntimeDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRuntimeDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateFlowRuntimeRequest') else: _json = None request = build_create_runtime_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, content_type=content_type, json=_json, async_call=async_call, msi_token=msi_token, skip_port_check=skip_port_check, template_url=self.create_runtime.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRuntimeDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_runtime.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}'} # type: ignore @distributed_trace_async async def update_runtime( self, subscription_id: str, resource_group_name: str, workspace_name: str, runtime_name: str, async_call: Optional[bool] = False, msi_token: Optional[bool] = False, skip_port_check: Optional[bool] = False, body: Optional["_models.UpdateFlowRuntimeRequest"] = None, **kwargs: Any ) -> "_models.FlowRuntimeDto": """update_runtime. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param runtime_name: :type runtime_name: str :param async_call: :type async_call: bool :param msi_token: :type msi_token: bool :param skip_port_check: :type skip_port_check: bool :param body: :type body: ~flow.models.UpdateFlowRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRuntimeDto, or the result of cls(response) :rtype: ~flow.models.FlowRuntimeDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRuntimeDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'UpdateFlowRuntimeRequest') else: _json = None request = build_update_runtime_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, content_type=content_type, json=_json, async_call=async_call, msi_token=msi_token, skip_port_check=skip_port_check, template_url=self.update_runtime.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRuntimeDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_runtime.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}'} # type: ignore @distributed_trace_async async def get_runtime( self, subscription_id: str, resource_group_name: str, workspace_name: str, runtime_name: str, **kwargs: Any ) -> "_models.FlowRuntimeDto": """get_runtime. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param runtime_name: :type runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRuntimeDto, or the result of cls(response) :rtype: ~flow.models.FlowRuntimeDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRuntimeDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_runtime_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, template_url=self.get_runtime.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRuntimeDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_runtime.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}'} # type: ignore @distributed_trace_async async def delete_runtime( self, subscription_id: str, resource_group_name: str, workspace_name: str, runtime_name: str, async_call: Optional[bool] = False, msi_token: Optional[bool] = False, **kwargs: Any ) -> "_models.FlowRuntimeDto": """delete_runtime. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param runtime_name: :type runtime_name: str :param async_call: :type async_call: bool :param msi_token: :type msi_token: bool :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRuntimeDto, or the result of cls(response) :rtype: ~flow.models.FlowRuntimeDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRuntimeDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_delete_runtime_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, async_call=async_call, msi_token=msi_token, template_url=self.delete_runtime.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRuntimeDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_runtime.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}'} # type: ignore @distributed_trace_async async def check_ci_availability( self, subscription_id: str, resource_group_name: str, workspace_name: str, compute_instance_name: str, custom_app_name: str, **kwargs: Any ) -> "_models.AvailabilityResponse": """check_ci_availability. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param compute_instance_name: :type compute_instance_name: str :param custom_app_name: :type custom_app_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: AvailabilityResponse, or the result of cls(response) :rtype: ~flow.models.AvailabilityResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailabilityResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_check_ci_availability_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, compute_instance_name=compute_instance_name, custom_app_name=custom_app_name, template_url=self.check_ci_availability.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('AvailabilityResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized check_ci_availability.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/checkCiAvailability'} # type: ignore @distributed_trace_async async def check_mir_availability( self, subscription_id: str, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any ) -> "_models.AvailabilityResponse": """check_mir_availability. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param endpoint_name: :type endpoint_name: str :param deployment_name: :type deployment_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: AvailabilityResponse, or the result of cls(response) :rtype: ~flow.models.AvailabilityResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailabilityResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_check_mir_availability_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, template_url=self.check_mir_availability.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('AvailabilityResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized check_mir_availability.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/checkMirAvailability'} # type: ignore @distributed_trace_async async def check_runtime_upgrade( self, subscription_id: str, resource_group_name: str, workspace_name: str, runtime_name: str, **kwargs: Any ) -> bool: """check_runtime_upgrade. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param runtime_name: :type runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: bool, or the result of cls(response) :rtype: bool :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[bool] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_check_runtime_upgrade_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, template_url=self.check_runtime_upgrade.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('bool', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized check_runtime_upgrade.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}/needUpgrade'} # type: ignore @distributed_trace_async async def get_runtime_capability( self, subscription_id: str, resource_group_name: str, workspace_name: str, runtime_name: str, **kwargs: Any ) -> "_models.FlowRuntimeCapability": """get_runtime_capability. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param runtime_name: :type runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRuntimeCapability, or the result of cls(response) :rtype: ~flow.models.FlowRuntimeCapability :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRuntimeCapability"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_runtime_capability_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, template_url=self.get_runtime_capability.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRuntimeCapability', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_runtime_capability.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}/capability'} # type: ignore @distributed_trace_async async def get_runtime_latest_config( self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> "_models.RuntimeConfiguration": """get_runtime_latest_config. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: RuntimeConfiguration, or the result of cls(response) :rtype: ~flow.models.RuntimeConfiguration :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.RuntimeConfiguration"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_runtime_latest_config_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.get_runtime_latest_config.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('RuntimeConfiguration', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_runtime_latest_config.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/latestConfig'} # type: ignore @distributed_trace_async async def list_runtimes( self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> List["_models.FlowRuntimeDto"]: """list_runtimes. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of FlowRuntimeDto, or the result of cls(response) :rtype: list[~flow.models.FlowRuntimeDto] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.FlowRuntimeDto"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_runtimes_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.list_runtimes.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[FlowRuntimeDto]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list_runtimes.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes'} # type: ignore
0
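A minimal usage sketch for the async FlowRuntimesOperations group above. It assumes an already-constructed generated async service client that exposes this operation group as an attribute; the attribute name `flow_runtimes` and the surrounding helper are illustrative assumptions, while the method names and keyword arguments come from the generated code.

```python
import asyncio


async def list_and_check_runtimes(client, subscription_id, resource_group_name, workspace_name, runtime_name):
    # list_runtimes returns a list of FlowRuntimeDto on HTTP 200
    runtimes = await client.flow_runtimes.list_runtimes(
        subscription_id=subscription_id,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
    )
    # check_runtime_upgrade returns a plain bool telling whether the runtime needs an upgrade
    need_upgrade = await client.flow_runtimes.check_runtime_upgrade(
        subscription_id=subscription_id,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        runtime_name=runtime_name,
    )
    return runtimes, need_upgrade

# Typical driver (client construction omitted):
# asyncio.run(list_and_check_runtimes(client, "<sub-id>", "<rg>", "<workspace>", "<runtime>"))
```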
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_tools_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from ... import models as _models from ..._vendor import _convert_request from ...operations._tools_operations import build_get_dynamic_list_request, build_get_package_tools_request, build_get_samples_request, build_get_tool_meta_request, build_get_tool_meta_v2_request, build_get_tool_setting_request, build_retrieve_tool_func_result_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class ToolsOperations: """ToolsOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace_async async def get_tool_setting( self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> "_models.ToolSetting": """get_tool_setting. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ToolSetting, or the result of cls(response) :rtype: ~flow.models.ToolSetting :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ToolSetting"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_tool_setting_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.get_tool_setting.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ToolSetting', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_tool_setting.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/setting'} # type: ignore @distributed_trace_async async def get_samples( self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> Dict[str, "_models.Tool"]: """get_samples. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: dict mapping str to Tool, or the result of cls(response) :rtype: dict[str, ~flow.models.Tool] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, "_models.Tool"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_samples_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.get_samples.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('{Tool}', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_samples.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/samples'} # type: ignore @distributed_trace_async async def get_tool_meta( self, subscription_id: str, resource_group_name: str, workspace_name: str, tool_name: str, tool_type: str, endpoint_name: Optional[str] = None, flow_runtime_name: Optional[str] = None, flow_id: Optional[str] = None, data: Optional[str] = None, **kwargs: Any ) -> str: """get_tool_meta. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param tool_name: :type tool_name: str :param tool_type: :type tool_type: str :param endpoint_name: :type endpoint_name: str :param flow_runtime_name: :type flow_runtime_name: str :param flow_id: :type flow_id: str :param data: :type data: str :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "text/plain") # type: Optional[str] _content = data request = build_get_tool_meta_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, tool_name=tool_name, tool_type=tool_type, content=_content, endpoint_name=endpoint_name, flow_runtime_name=flow_runtime_name, flow_id=flow_id, template_url=self.get_tool_meta.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_tool_meta.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/meta'} # type: ignore @distributed_trace_async async def get_tool_meta_v2( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_runtime_name: Optional[str] = None, flow_id: Optional[str] = None, body: Optional["_models.GenerateToolMetaRequest"] = None, **kwargs: Any ) -> "_models.ToolMetaDto": """get_tool_meta_v2. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_runtime_name: :type flow_runtime_name: str :param flow_id: :type flow_id: str :param body: :type body: ~flow.models.GenerateToolMetaRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ToolMetaDto, or the result of cls(response) :rtype: ~flow.models.ToolMetaDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ToolMetaDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'GenerateToolMetaRequest') else: _json = None request = build_get_tool_meta_v2_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, flow_runtime_name=flow_runtime_name, flow_id=flow_id, template_url=self.get_tool_meta_v2.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ToolMetaDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_tool_meta_v2.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/meta-v2'} # type: ignore @distributed_trace_async async def get_package_tools( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_runtime_name: Optional[str] = None, flow_id: Optional[str] = None, **kwargs: Any ) -> Dict[str, "_models.Tool"]: """get_package_tools. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_runtime_name: :type flow_runtime_name: str :param flow_id: :type flow_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: dict mapping str to Tool, or the result of cls(response) :rtype: dict[str, ~flow.models.Tool] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, "_models.Tool"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_package_tools_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_runtime_name=flow_runtime_name, flow_id=flow_id, template_url=self.get_package_tools.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('{Tool}', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_package_tools.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/packageTools'} # type: ignore @distributed_trace_async async def get_dynamic_list( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_runtime_name: Optional[str] = None, flow_id: Optional[str] = None, body: Optional["_models.GetDynamicListRequest"] = None, **kwargs: Any ) -> List[Any]: """get_dynamic_list. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_runtime_name: :type flow_runtime_name: str :param flow_id: :type flow_id: str :param body: :type body: ~flow.models.GetDynamicListRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: list of any, or the result of cls(response) :rtype: list[any] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List[Any]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'GetDynamicListRequest') else: _json = None request = build_get_dynamic_list_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, flow_runtime_name=flow_runtime_name, flow_id=flow_id, template_url=self.get_dynamic_list.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[object]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_dynamic_list.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/dynamicList'} # type: ignore @distributed_trace_async async def retrieve_tool_func_result( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_runtime_name: Optional[str] = None, flow_id: Optional[str] = None, body: Optional["_models.RetrieveToolFuncResultRequest"] = None, **kwargs: Any ) -> "_models.ToolFuncResponse": """retrieve_tool_func_result. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_runtime_name: :type flow_runtime_name: str :param flow_id: :type flow_id: str :param body: :type body: ~flow.models.RetrieveToolFuncResultRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ToolFuncResponse, or the result of cls(response) :rtype: ~flow.models.ToolFuncResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ToolFuncResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'RetrieveToolFuncResultRequest') else: _json = None request = build_retrieve_tool_func_result_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, flow_runtime_name=flow_runtime_name, flow_id=flow_id, template_url=self.retrieve_tool_func_result.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ToolFuncResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized retrieve_tool_func_result.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/RetrieveToolFuncResult'} # type: ignore
0
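A hedged sketch of the ToolsOperations group above. The `client.tools` accessor is an assumed attribute on the generated async client (illustrative only); the method names, parameters, and return shapes mirror the generated code.

```python
async def fetch_package_tools(client, subscription_id, resource_group_name, workspace_name, runtime_name=None):
    # get_package_tools returns a dict mapping tool name to Tool on HTTP 200;
    # flow_runtime_name and flow_id are optional query parameters
    tools = await client.tools.get_package_tools(
        subscription_id=subscription_id,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        flow_runtime_name=runtime_name,
    )
    for name, tool in tools.items():
        print(name, tool)
    return tools
```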
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_flows_provider_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from ... import models as _models from ..._vendor import _convert_request from ...operations._flows_provider_operations import build_get_index_entity_by_id_request, build_get_updated_entity_ids_for_workspace_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class FlowsProviderOperations: """FlowsProviderOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace_async async def get_index_entity_by_id( self, subscription_id: str, resource_group_name: str, workspace_name: str, body: Optional["_models.UnversionedEntityRequestDto"] = None, **kwargs: Any ) -> "_models.UnversionedEntityResponseDto": """get_index_entity_by_id. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param body: :type body: ~flow.models.UnversionedEntityRequestDto :keyword callable cls: A custom type or function that will be passed the direct response :return: UnversionedEntityResponseDto, or the result of cls(response) :rtype: ~flow.models.UnversionedEntityResponseDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.UnversionedEntityResponseDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'UnversionedEntityRequestDto') else: _json = None request = build_get_index_entity_by_id_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, template_url=self.get_index_entity_by_id.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('UnversionedEntityResponseDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_index_entity_by_id.metadata = {'url': '/flow/v1.0/flows/getIndexEntities'} # type: ignore @distributed_trace_async async def get_updated_entity_ids_for_workspace( self, subscription_id: str, resource_group_name: str, workspace_name: str, body: Optional["_models.UnversionedRebuildIndexDto"] = None, **kwargs: Any ) -> "_models.UnversionedRebuildResponseDto": """get_updated_entity_ids_for_workspace. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param body: :type body: ~flow.models.UnversionedRebuildIndexDto :keyword callable cls: A custom type or function that will be passed the direct response :return: UnversionedRebuildResponseDto, or the result of cls(response) :rtype: ~flow.models.UnversionedRebuildResponseDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.UnversionedRebuildResponseDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'UnversionedRebuildIndexDto') else: _json = None request = build_get_updated_entity_ids_for_workspace_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, template_url=self.get_updated_entity_ids_for_workspace.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('UnversionedRebuildResponseDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_updated_entity_ids_for_workspace.metadata = {'url': '/flow/v1.0/flows/rebuildIndex'} # type: ignore
0
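A short sketch for the FlowsProviderOperations group above, again assuming the generated async client exposes it as a `flows_provider` attribute (an illustrative name). The request body models are optional in the generated signatures, so the sketch omits them.

```python
async def rebuild_workspace_index(client, subscription_id, resource_group_name, workspace_name):
    # get_updated_entity_ids_for_workspace returns an UnversionedRebuildResponseDto on HTTP 200;
    # the optional UnversionedRebuildIndexDto body is left as None here
    return await client.flows_provider.get_updated_entity_ids_for_workspace(
        subscription_id=subscription_id,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
    )
```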
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/__init__.py
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from ._bulk_runs_operations import BulkRunsOperations
from ._connection_operations import ConnectionOperations
from ._connections_operations import ConnectionsOperations
from ._flow_runs_admin_operations import FlowRunsAdminOperations
from ._flow_runtimes_operations import FlowRuntimesOperations
from ._flow_runtimes_workspace_independent_operations import FlowRuntimesWorkspaceIndependentOperations
from ._flows_operations import FlowsOperations
from ._flow_sessions_operations import FlowSessionsOperations
from ._flows_provider_operations import FlowsProviderOperations
from ._tools_operations import ToolsOperations

__all__ = [
    'BulkRunsOperations',
    'ConnectionOperations',
    'ConnectionsOperations',
    'FlowRunsAdminOperations',
    'FlowRuntimesOperations',
    'FlowRuntimesWorkspaceIndependentOperations',
    'FlowsOperations',
    'FlowSessionsOperations',
    'FlowsProviderOperations',
    'ToolsOperations',
]
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_flow_sessions_admin_operations.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings

from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async

from ... import models as _models
from ..._vendor import _convert_request
from ...operations._flow_sessions_admin_operations import build_create_flow_session_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]

class FlowSessionsAdminOperations:
    """FlowSessionsAdminOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~flow.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    @distributed_trace_async
    async def create_flow_session(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        session_id: str,
        waitfor_completion: Optional[bool] = False,
        body: Optional["_models.CreateFlowSessionRequest"] = None,
        **kwargs: Any
    ) -> str:
        """create_flow_session.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param session_id:
        :type session_id: str
        :param waitfor_completion:
        :type waitfor_completion: bool
        :param body:
        :type body: ~flow.models.CreateFlowSessionRequest
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: str, or the result of cls(response)
        :rtype: str
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[str]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        if body is not None:
            _json = self._serialize.body(body, 'CreateFlowSessionRequest')
        else:
            _json = None

        request = build_create_flow_session_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            session_id=session_id,
            content_type=content_type,
            json=_json,
            waitfor_completion=waitfor_completion,
            template_url=self.create_flow_session.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('str', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    create_flow_session.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessionsAdmin/{sessionId}'}  # type: ignore
0
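A hedged usage sketch for the FlowSessionsAdminOperations group above; `client.flow_sessions_admin` is an assumed attribute name on the generated async client, and the optional CreateFlowSessionRequest body is omitted.

```python
async def create_admin_flow_session(client, subscription_id, resource_group_name, workspace_name, session_id):
    # create_flow_session returns a str on HTTP 200; waitfor_completion defaults to False in the generated code
    return await client.flow_sessions_admin.create_flow_session(
        subscription_id=subscription_id,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        session_id=session_id,
        waitfor_completion=True,
    )
```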
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_flow_sessions_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from ... import models as _models from ..._vendor import _convert_request from ...operations._flow_sessions_operations import build_create_flow_session_request, build_delete_flow_session_request, build_get_flow_session_request, build_get_standby_pools_request, build_poll_operation_status_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class FlowSessionsOperations: """FlowSessionsOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace_async async def create_flow_session( self, subscription_id: str, resource_group_name: str, workspace_name: str, session_id: str, body: Optional["_models.CreateFlowSessionRequest"] = None, **kwargs: Any ) -> Any: """create_flow_session. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param session_id: :type session_id: str :param body: :type body: ~flow.models.CreateFlowSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: any, or the result of cls(response) :rtype: any :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Any] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateFlowSessionRequest') else: _json = None request = build_create_flow_session_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, session_id=session_id, content_type=content_type, json=_json, template_url=self.create_flow_session.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) if response.status_code == 200: deserialized = self._deserialize('object', pipeline_response) if response.status_code == 202: deserialized = self._deserialize('object', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_flow_session.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/{sessionId}'} # type: ignore @distributed_trace_async async def get_flow_session( self, subscription_id: str, resource_group_name: str, workspace_name: str, session_id: str, **kwargs: Any ) -> "_models.GetTrainingSessionDto": """get_flow_session. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param session_id: :type session_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: GetTrainingSessionDto, or the result of cls(response) :rtype: ~flow.models.GetTrainingSessionDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.GetTrainingSessionDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_session_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, session_id=session_id, template_url=self.get_flow_session.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('GetTrainingSessionDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_session.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/{sessionId}'} # type: ignore @distributed_trace_async async def delete_flow_session( self, subscription_id: str, resource_group_name: str, workspace_name: str, session_id: str, **kwargs: Any ) -> Any: """delete_flow_session. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param session_id: :type session_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: any, or the result of cls(response) :rtype: any :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Any] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_delete_flow_session_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, session_id=session_id, template_url=self.delete_flow_session.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) if response.status_code == 200: deserialized = self._deserialize('object', pipeline_response) if response.status_code == 202: deserialized = self._deserialize('object', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_flow_session.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/{sessionId}'} # type: ignore @distributed_trace_async async def poll_operation_status( self, subscription_id: str, resource_group_name: str, workspace_name: str, session_id: str, action_type: Union[str, "_models.SetupFlowSessionAction"], location: str, operation_id: str, api_version: Optional[str] = "1.0.0", type: Optional[str] = None, **kwargs: Any ) -> Any: """poll_operation_status. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param session_id: :type session_id: str :param action_type: :type action_type: str or ~flow.models.SetupFlowSessionAction :param location: :type location: str :param operation_id: :type operation_id: str :param api_version: Api Version. The default value is "1.0.0". 
:type api_version: str :param type: :type type: str :keyword callable cls: A custom type or function that will be passed the direct response :return: any, or the result of cls(response) :rtype: any :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Any] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_poll_operation_status_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, session_id=session_id, action_type=action_type, location=location, operation_id=operation_id, api_version=api_version, type=type, template_url=self.poll_operation_status.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('object', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized poll_operation_status.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/{sessionId}/{actionType}/locations/{location}/operations/{operationId}'} # type: ignore @distributed_trace_async async def get_standby_pools( self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> List["_models.StandbyPoolProperties"]: """get_standby_pools. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of StandbyPoolProperties, or the result of cls(response) :rtype: list[~flow.models.StandbyPoolProperties] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.StandbyPoolProperties"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_standby_pools_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.get_standby_pools.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[StandbyPoolProperties]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_standby_pools.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessions/standbypools'} # type: ignore
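# ---------------------------------------------------------------------------
# Hypothetical usage sketch (not generated code). It assumes this operation
# group is exposed on an already-authenticated async service client as
# `client.flow_sessions_admin`; that attribute name and the placeholder scope
# values in the usage note are illustrative assumptions, not part of the
# service contract.
async def fetch_flow_session(client, subscription_id, resource_group_name, workspace_name, session_id):
    # Thin wrapper over the GET .../FlowSessions/{sessionId} operation defined
    # above; returns the deserialized GetTrainingSessionDto.
    return await client.flow_sessions_admin.get_flow_session(
        subscription_id=subscription_id,
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        session_id=session_id,
    )
# Example (inside an async context):
#     session = await fetch_flow_session(client, "<sub-id>", "<rg>", "<ws>", "<session-id>")
# ---------------------------------------------------------------------------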
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_flows_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from ... import models as _models from ..._vendor import _convert_request from ...operations._flows_operations import build_cancel_bulk_test_run_request, build_cancel_flow_run_request, build_cancel_flow_test_request, build_clone_flow_from_flow_run_request, build_clone_flow_request, build_create_flow_from_sample_request, build_create_flow_request, build_delete_flow_session_request, build_deploy_flow_request, build_get_bulk_test_request, build_get_connection_override_settings_request, build_get_evaluate_flow_samples_request, build_get_flow_child_runs_request, build_get_flow_deploy_reserved_environment_variable_names_request, build_get_flow_inputs_request, build_get_flow_node_run_base_path_request, build_get_flow_node_runs_request, build_get_flow_request, build_get_flow_run_info_request, build_get_flow_run_log_content_request, build_get_flow_run_status_request, build_get_flow_session_status_request, build_get_flow_snapshot_request, build_get_flow_tools_request, build_get_samples_request, build_list_bulk_tests_request, build_list_flows_request, build_load_as_component_request, build_patch_flow_request, build_setup_flow_session_request, build_submit_flow_request, build_update_flow_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class FlowsOperations: """FlowsOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace_async async def create_flow( self, subscription_id: str, resource_group_name: str, workspace_name: str, experiment_id: Optional[str] = None, body: Optional["_models.CreateFlowRequest"] = None, **kwargs: Any ) -> "_models.FlowDto": """create_flow. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param experiment_id: :type experiment_id: str :param body: :type body: ~flow.models.CreateFlowRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowDto, or the result of cls(response) :rtype: ~flow.models.FlowDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateFlowRequest') else: _json = None request = build_create_flow_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, experiment_id=experiment_id, template_url=self.create_flow.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_flow.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows'} # type: ignore @distributed_trace_async async def list_flows( self, subscription_id: str, resource_group_name: str, workspace_name: str, experiment_id: Optional[str] = None, owned_only: Optional[bool] = None, flow_type: Optional[Union[str, "_models.FlowType"]] = None, list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, **kwargs: Any ) -> List["_models.FlowBaseDto"]: """list_flows. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param experiment_id: :type experiment_id: str :param owned_only: :type owned_only: bool :param flow_type: :type flow_type: str or ~flow.models.FlowType :param list_view_type: :type list_view_type: str or ~flow.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response :return: list of FlowBaseDto, or the result of cls(response) :rtype: list[~flow.models.FlowBaseDto] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.FlowBaseDto"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_flows_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, experiment_id=experiment_id, owned_only=owned_only, flow_type=flow_type, list_view_type=list_view_type, template_url=self.list_flows.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[FlowBaseDto]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list_flows.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows'} # type: ignore @distributed_trace_async async def clone_flow( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, experiment_id: str, body: Optional["_models.CreateFlowRequest"] = None, **kwargs: Any ) -> "_models.FlowDto": """clone_flow. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param experiment_id: :type experiment_id: str :param body: :type body: ~flow.models.CreateFlowRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowDto, or the result of cls(response) :rtype: ~flow.models.FlowDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateFlowRequest') else: _json = None request = build_clone_flow_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, content_type=content_type, experiment_id=experiment_id, json=_json, template_url=self.clone_flow.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized clone_flow.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/clone'} # type: ignore @distributed_trace_async async def create_flow_from_sample( self, subscription_id: str, resource_group_name: str, workspace_name: str, experiment_id: Optional[str] = None, body: Optional["_models.CreateFlowFromSampleRequest"] = None, **kwargs: Any ) -> "_models.FlowDto": """create_flow_from_sample. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param experiment_id: :type experiment_id: str :param body: :type body: ~flow.models.CreateFlowFromSampleRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowDto, or the result of cls(response) :rtype: ~flow.models.FlowDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateFlowFromSampleRequest') else: _json = None request = build_create_flow_from_sample_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, experiment_id=experiment_id, template_url=self.create_flow_from_sample.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_flow_from_sample.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/fromsample'} # type: ignore @distributed_trace_async async def update_flow( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, experiment_id: str, body: Optional["_models.UpdateFlowRequest"] = None, **kwargs: Any ) -> str: """update_flow. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param experiment_id: :type experiment_id: str :param body: :type body: ~flow.models.UpdateFlowRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'UpdateFlowRequest') else: _json = None request = build_update_flow_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, content_type=content_type, experiment_id=experiment_id, json=_json, template_url=self.update_flow.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_flow.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}'} # type: ignore @distributed_trace_async async def patch_flow( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, experiment_id: str, body: Optional["_models.PatchFlowRequest"] = None, **kwargs: Any ) -> str: """patch_flow. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param experiment_id: :type experiment_id: str :param body: :type body: ~flow.models.PatchFlowRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json-patch+json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'PatchFlowRequest') else: _json = None request = build_patch_flow_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, content_type=content_type, experiment_id=experiment_id, json=_json, template_url=self.patch_flow.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized patch_flow.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}'} # type: ignore @distributed_trace_async async def get_flow( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, experiment_id: str, **kwargs: Any ) -> "_models.FlowDto": """get_flow. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param experiment_id: :type experiment_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowDto, or the result of cls(response) :rtype: ~flow.models.FlowDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, experiment_id=experiment_id, template_url=self.get_flow.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}'} # type: ignore @distributed_trace_async async def submit_flow( self, subscription_id: str, resource_group_name: str, workspace_name: str, experiment_id: str, endpoint_name: Optional[str] = None, body: Optional["_models.SubmitFlowRequest"] = None, **kwargs: Any ) -> "_models.FlowRunResult": """submit_flow. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param experiment_id: :type experiment_id: str :param endpoint_name: :type endpoint_name: str :param body: :type body: ~flow.models.SubmitFlowRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRunResult, or the result of cls(response) :rtype: ~flow.models.FlowRunResult :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRunResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'SubmitFlowRequest') else: _json = None request = build_submit_flow_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, experiment_id=experiment_id, json=_json, endpoint_name=endpoint_name, template_url=self.submit_flow.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRunResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized submit_flow.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/submit'} # type: ignore @distributed_trace_async async def get_flow_run_status( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, flow_run_id: str, experiment_id: Optional[str] = None, **kwargs: Any ) -> "_models.FlowRunResult": """get_flow_run_status. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param experiment_id: :type experiment_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRunResult, or the result of cls(response) :rtype: ~flow.models.FlowRunResult :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRunResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_run_status_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, experiment_id=experiment_id, template_url=self.get_flow_run_status.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRunResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_run_status.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/{flowRunId}/status'} # type: ignore @distributed_trace_async async def get_flow_run_info( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, flow_run_id: str, experiment_id: str, **kwargs: Any ) -> "_models.FlowRunInfo": """get_flow_run_info. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param experiment_id: :type experiment_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRunInfo, or the result of cls(response) :rtype: ~flow.models.FlowRunInfo :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRunInfo"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_run_info_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, experiment_id=experiment_id, template_url=self.get_flow_run_info.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRunInfo', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_run_info.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}'} # type: ignore @distributed_trace_async async def get_flow_child_runs( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, flow_run_id: str, index: Optional[int] = None, start_index: Optional[int] = None, end_index: Optional[int] = None, **kwargs: Any ) -> List[Any]: """get_flow_child_runs. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param index: :type index: int :param start_index: :type start_index: int :param end_index: :type end_index: int :keyword callable cls: A custom type or function that will be passed the direct response :return: list of any, or the result of cls(response) :rtype: list[any] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List[Any]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_child_runs_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, index=index, start_index=start_index, end_index=end_index, template_url=self.get_flow_child_runs.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[object]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_child_runs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/childRuns'} # type: ignore @distributed_trace_async async def get_flow_node_runs( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, flow_run_id: str, node_name: str, index: Optional[int] = None, start_index: Optional[int] = None, end_index: Optional[int] = None, aggregation: Optional[bool] = False, **kwargs: Any ) -> List[Any]: """get_flow_node_runs. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param node_name: :type node_name: str :param index: :type index: int :param start_index: :type start_index: int :param end_index: :type end_index: int :param aggregation: :type aggregation: bool :keyword callable cls: A custom type or function that will be passed the direct response :return: list of any, or the result of cls(response) :rtype: list[any] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List[Any]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_node_runs_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, node_name=node_name, index=index, start_index=start_index, end_index=end_index, aggregation=aggregation, template_url=self.get_flow_node_runs.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[object]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_node_runs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/nodeRuns/{nodeName}'} # type: ignore @distributed_trace_async async def get_flow_node_run_base_path( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, flow_run_id: str, node_name: str, **kwargs: Any ) -> "_models.FlowRunBasePath": """get_flow_node_run_base_path. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param node_name: :type node_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRunBasePath, or the result of cls(response) :rtype: ~flow.models.FlowRunBasePath :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRunBasePath"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_node_run_base_path_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, node_name=node_name, template_url=self.get_flow_node_run_base_path.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRunBasePath', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_node_run_base_path.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/nodeRuns/{nodeName}/basePath'} # type: ignore @distributed_trace_async async def clone_flow_from_flow_run( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, flow_run_id: str, experiment_id: str, body: Optional["_models.CreateFlowRequest"] = None, **kwargs: Any ) -> "_models.FlowDto": """clone_flow_from_flow_run. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :param experiment_id: :type experiment_id: str :param body: :type body: ~flow.models.CreateFlowRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowDto, or the result of cls(response) :rtype: ~flow.models.FlowDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateFlowRequest') else: _json = None request = build_clone_flow_from_flow_run_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, content_type=content_type, experiment_id=experiment_id, json=_json, template_url=self.clone_flow_from_flow_run.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized clone_flow_from_flow_run.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/clone'} # type: ignore @distributed_trace_async async def list_bulk_tests( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, experiment_id: Optional[str] = None, **kwargs: Any ) -> List["_models.BulkTestDto"]: """list_bulk_tests. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param experiment_id: :type experiment_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of BulkTestDto, or the result of cls(response) :rtype: list[~flow.models.BulkTestDto] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.BulkTestDto"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_bulk_tests_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, experiment_id=experiment_id, template_url=self.list_bulk_tests.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[BulkTestDto]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list_bulk_tests.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/bulkTests'} # type: ignore @distributed_trace_async async def get_bulk_test( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, bulk_test_id: str, **kwargs: Any ) -> "_models.BulkTestDto": """get_bulk_test. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param bulk_test_id: :type bulk_test_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: BulkTestDto, or the result of cls(response) :rtype: ~flow.models.BulkTestDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.BulkTestDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_bulk_test_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, bulk_test_id=bulk_test_id, template_url=self.get_bulk_test.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('BulkTestDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_bulk_test.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/bulkTests/{bulkTestId}'} # type: ignore @distributed_trace_async async def get_samples( self, subscription_id: str, resource_group_name: str, workspace_name: str, use_snapshot: Optional[bool] = False, **kwargs: Any ) -> Dict[str, "_models.FlowSampleDto"]: """get_samples. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param use_snapshot: :type use_snapshot: bool :keyword callable cls: A custom type or function that will be passed the direct response :return: dict mapping str to FlowSampleDto, or the result of cls(response) :rtype: dict[str, ~flow.models.FlowSampleDto] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, "_models.FlowSampleDto"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_samples_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, use_snapshot=use_snapshot, template_url=self.get_samples.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('{FlowSampleDto}', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_samples.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/samples'} # type: ignore @distributed_trace_async async def get_evaluate_flow_samples( self, subscription_id: str, resource_group_name: str, workspace_name: str, use_snapshot: Optional[bool] = False, **kwargs: Any ) -> Dict[str, "_models.FlowSampleDto"]: """get_evaluate_flow_samples. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param use_snapshot: :type use_snapshot: bool :keyword callable cls: A custom type or function that will be passed the direct response :return: dict mapping str to FlowSampleDto, or the result of cls(response) :rtype: dict[str, ~flow.models.FlowSampleDto] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, "_models.FlowSampleDto"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_evaluate_flow_samples_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, use_snapshot=use_snapshot, template_url=self.get_evaluate_flow_samples.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('{FlowSampleDto}', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_evaluate_flow_samples.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/evaluateSamples'} # type: ignore @distributed_trace_async async def get_flow_deploy_reserved_environment_variable_names( self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> List[str]: """get_flow_deploy_reserved_environment_variable_names. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of str, or the result of cls(response) :rtype: list[str] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List[str]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_deploy_reserved_environment_variable_names_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.get_flow_deploy_reserved_environment_variable_names.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[str]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_deploy_reserved_environment_variable_names.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/DeployReservedEnvironmentVariableNames'} # type: ignore @distributed_trace_async async def deploy_flow( self, subscription_id: str, resource_group_name: str, workspace_name: str, async_call: Optional[bool] = False, msi_token: Optional[bool] = False, body: Optional["_models.DeployFlowRequest"] = None, **kwargs: Any ) -> str: """deploy_flow. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param async_call: :type async_call: bool :param msi_token: :type msi_token: bool :param body: :type body: ~flow.models.DeployFlowRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'DeployFlowRequest') else: _json = None request = build_deploy_flow_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, async_call=async_call, msi_token=msi_token, template_url=self.deploy_flow.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized deploy_flow.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/deploy'} # type: ignore @distributed_trace_async async def get_flow_run_log_content( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_id: str, flow_run_id: str, **kwargs: Any ) -> str: """get_flow_run_log_content. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param flow_id: :type flow_id: str :param flow_run_id: :type flow_run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: str, or the result of cls(response) :rtype: str :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[str] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_flow_run_log_content_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, flow_id=flow_id, flow_run_id=flow_run_id, template_url=self.get_flow_run_log_content.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('str', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_flow_run_log_content.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/runs/{flowRunId}/logContent'} # type: ignore @distributed_trace_async async def cancel_flow_run( self, subscription_id: str, resource_group_name: str, workspace_name: str, flow_run_id: str, **kwargs: Any ) -> str: """cancel_flow_run. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
        :type workspace_name: str
        :param flow_run_id:
        :type flow_run_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: str, or the result of cls(response)
        :rtype: str
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[str]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_cancel_flow_run_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            flow_run_id=flow_run_id,
            template_url=self.cancel_flow_run.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('str', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    cancel_flow_run.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/runs/{flowRunId}/cancel'}  # type: ignore


    @distributed_trace_async
    async def cancel_flow_test(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        flow_id: str,
        flow_run_id: str,
        **kwargs: Any
    ) -> str:
        """cancel_flow_test.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param flow_id:
        :type flow_id: str
        :param flow_run_id:
        :type flow_run_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: str, or the result of cls(response)
        :rtype: str
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[str]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_cancel_flow_test_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            flow_id=flow_id,
            flow_run_id=flow_run_id,
            template_url=self.cancel_flow_test.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('str', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    cancel_flow_test.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/flowTests/{flowRunId}/cancel'}  # type: ignore


    @distributed_trace_async
    async def cancel_bulk_test_run(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        bulk_test_run_id: str,
        **kwargs: Any
    ) -> str:
        """cancel_bulk_test_run.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param bulk_test_run_id:
        :type bulk_test_run_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: str, or the result of cls(response)
        :rtype: str
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[str]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_cancel_bulk_test_run_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            bulk_test_run_id=bulk_test_run_id,
            template_url=self.cancel_bulk_test_run.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('str', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    cancel_bulk_test_run.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/bulkTests/{bulkTestRunId}/cancel'}  # type: ignore


    @distributed_trace_async
    async def get_flow_snapshot(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        body: Optional["_models.CreateFlowRequest"] = None,
        **kwargs: Any
    ) -> "_models.FlowSnapshot":
        """get_flow_snapshot.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param body:
        :type body: ~flow.models.CreateFlowRequest
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: FlowSnapshot, or the result of cls(response)
        :rtype: ~flow.models.FlowSnapshot
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FlowSnapshot"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        if body is not None:
            _json = self._serialize.body(body, 'CreateFlowRequest')
        else:
            _json = None

        request = build_get_flow_snapshot_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            content_type=content_type,
            json=_json,
            template_url=self.get_flow_snapshot.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('FlowSnapshot', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_flow_snapshot.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/FlowSnapshot'}  # type: ignore


    @distributed_trace_async
    async def get_connection_override_settings(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        runtime_name: Optional[str] = None,
        body: Optional["_models.FlowGraphReference"] = None,
        **kwargs: Any
    ) -> List["_models.ConnectionOverrideSetting"]:
        """get_connection_override_settings.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param runtime_name:
        :type runtime_name: str
        :param body:
        :type body: ~flow.models.FlowGraphReference
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: list of ConnectionOverrideSetting, or the result of cls(response)
        :rtype: list[~flow.models.ConnectionOverrideSetting]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[List["_models.ConnectionOverrideSetting"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        if body is not None:
            _json = self._serialize.body(body, 'FlowGraphReference')
        else:
            _json = None

        request = build_get_connection_override_settings_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            content_type=content_type,
            json=_json,
            runtime_name=runtime_name,
            template_url=self.get_connection_override_settings.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('[ConnectionOverrideSetting]', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_connection_override_settings.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/connectionOverride'}  # type: ignore


    @distributed_trace_async
    async def get_flow_inputs(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        body: Optional["_models.FlowGraphReference"] = None,
        **kwargs: Any
    ) -> Dict[str, "_models.FlowInputDefinition"]:
        """get_flow_inputs.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param body:
        :type body: ~flow.models.FlowGraphReference
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: dict mapping str to FlowInputDefinition, or the result of cls(response)
        :rtype: dict[str, ~flow.models.FlowInputDefinition]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Dict[str, "_models.FlowInputDefinition"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        if body is not None:
            _json = self._serialize.body(body, 'FlowGraphReference')
        else:
            _json = None

        request = build_get_flow_inputs_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            content_type=content_type,
            json=_json,
            template_url=self.get_flow_inputs.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('{FlowInputDefinition}', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_flow_inputs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/flowInputs'}  # type: ignore


    @distributed_trace_async
    async def load_as_component(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        body: Optional["_models.LoadFlowAsComponentRequest"] = None,
        **kwargs: Any
    ) -> str:
        """load_as_component.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param body:
        :type body: ~flow.models.LoadFlowAsComponentRequest
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: str, or the result of cls(response)
        :rtype: str
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[str]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        if body is not None:
            _json = self._serialize.body(body, 'LoadFlowAsComponentRequest')
        else:
            _json = None

        request = build_load_as_component_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            content_type=content_type,
            json=_json,
            template_url=self.load_as_component.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('str', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    load_as_component.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/LoadAsComponent'}  # type: ignore


    @distributed_trace_async
    async def get_flow_tools(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        flow_id: str,
        experiment_id: str,
        flow_runtime_name: Optional[str] = None,
        **kwargs: Any
    ) -> "_models.FlowToolsDto":
        """get_flow_tools.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param flow_id:
        :type flow_id: str
        :param experiment_id:
        :type experiment_id: str
        :param flow_runtime_name:
        :type flow_runtime_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: FlowToolsDto, or the result of cls(response)
        :rtype: ~flow.models.FlowToolsDto
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FlowToolsDto"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_get_flow_tools_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            flow_id=flow_id,
            experiment_id=experiment_id,
            flow_runtime_name=flow_runtime_name,
            template_url=self.get_flow_tools.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('FlowToolsDto', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_flow_tools.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/flowTools'}  # type: ignore


    @distributed_trace_async
    async def setup_flow_session(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        flow_id: str,
        experiment_id: str,
        body: Optional["_models.SetupFlowSessionRequest"] = None,
        **kwargs: Any
    ) -> Any:
        """setup_flow_session.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param flow_id:
        :type flow_id: str
        :param experiment_id:
        :type experiment_id: str
        :param body:
        :type body: ~flow.models.SetupFlowSessionRequest
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: any, or the result of cls(response)
        :rtype: any
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        if body is not None:
            _json = self._serialize.body(body, 'SetupFlowSessionRequest')
        else:
            _json = None

        request = build_setup_flow_session_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            flow_id=flow_id,
            content_type=content_type,
            experiment_id=experiment_id,
            json=_json,
            template_url=self.setup_flow_session.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error)

        if response.status_code == 200:
            deserialized = self._deserialize('object', pipeline_response)

        if response.status_code == 202:
            deserialized = self._deserialize('object', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    setup_flow_session.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/sessions'}  # type: ignore


    @distributed_trace_async
    async def delete_flow_session(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        flow_id: str,
        experiment_id: str,
        **kwargs: Any
    ) -> Any:
        """delete_flow_session.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param flow_id:
        :type flow_id: str
        :param experiment_id:
        :type experiment_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: any, or the result of cls(response)
        :rtype: any
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_delete_flow_session_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            flow_id=flow_id,
            experiment_id=experiment_id,
            template_url=self.delete_flow_session.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error)

        if response.status_code == 200:
            deserialized = self._deserialize('object', pipeline_response)

        if response.status_code == 202:
            deserialized = self._deserialize('object', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    delete_flow_session.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/sessions'}  # type: ignore


    @distributed_trace_async
    async def get_flow_session_status(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        flow_id: str,
        experiment_id: str,
        **kwargs: Any
    ) -> "_models.FlowSessionDto":
        """get_flow_session_status.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param flow_id:
        :type flow_id: str
        :param experiment_id:
        :type experiment_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: FlowSessionDto, or the result of cls(response)
        :rtype: ~flow.models.FlowSessionDto
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FlowSessionDto"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_get_flow_session_status_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            flow_id=flow_id,
            experiment_id=experiment_id,
            template_url=self.get_flow_session_status.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('FlowSessionDto', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_flow_session_status.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Flows/{flowId}/sessions/status'}  # type: ignore
0