repo_id
stringlengths
15
132
file_path
stringlengths
34
176
content
stringlengths
2
3.52M
__index_level_0__
int64
0
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_flow_runtimes_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from ... import models as _models from ..._vendor import _convert_request from ...operations._flow_runtimes_operations import build_check_ci_availability_request, build_check_mir_availability_request, build_check_runtime_upgrade_request, build_create_runtime_request, build_delete_runtime_request, build_get_runtime_capability_request, build_get_runtime_latest_config_request, build_get_runtime_request, build_list_runtimes_request, build_update_runtime_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class FlowRuntimesOperations: """FlowRuntimesOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. 
""" models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace_async async def create_runtime( self, subscription_id: str, resource_group_name: str, workspace_name: str, runtime_name: str, async_call: Optional[bool] = False, msi_token: Optional[bool] = False, skip_port_check: Optional[bool] = False, body: Optional["_models.CreateFlowRuntimeRequest"] = None, **kwargs: Any ) -> "_models.FlowRuntimeDto": """create_runtime. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param runtime_name: :type runtime_name: str :param async_call: :type async_call: bool :param msi_token: :type msi_token: bool :param skip_port_check: :type skip_port_check: bool :param body: :type body: ~flow.models.CreateFlowRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRuntimeDto, or the result of cls(response) :rtype: ~flow.models.FlowRuntimeDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRuntimeDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateFlowRuntimeRequest') else: _json = None request = build_create_runtime_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, content_type=content_type, json=_json, async_call=async_call, msi_token=msi_token, 
skip_port_check=skip_port_check, template_url=self.create_runtime.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRuntimeDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_runtime.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}'} # type: ignore @distributed_trace_async async def update_runtime( self, subscription_id: str, resource_group_name: str, workspace_name: str, runtime_name: str, async_call: Optional[bool] = False, msi_token: Optional[bool] = False, skip_port_check: Optional[bool] = False, body: Optional["_models.UpdateFlowRuntimeRequest"] = None, **kwargs: Any ) -> "_models.FlowRuntimeDto": """update_runtime. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param runtime_name: :type runtime_name: str :param async_call: :type async_call: bool :param msi_token: :type msi_token: bool :param skip_port_check: :type skip_port_check: bool :param body: :type body: ~flow.models.UpdateFlowRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRuntimeDto, or the result of cls(response) :rtype: ~flow.models.FlowRuntimeDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRuntimeDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'UpdateFlowRuntimeRequest') else: _json = None request = build_update_runtime_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, content_type=content_type, json=_json, async_call=async_call, msi_token=msi_token, skip_port_check=skip_port_check, template_url=self.update_runtime.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRuntimeDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_runtime.metadata = {'url': 
'/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}'} # type: ignore @distributed_trace_async async def get_runtime( self, subscription_id: str, resource_group_name: str, workspace_name: str, runtime_name: str, **kwargs: Any ) -> "_models.FlowRuntimeDto": """get_runtime. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param runtime_name: :type runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRuntimeDto, or the result of cls(response) :rtype: ~flow.models.FlowRuntimeDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRuntimeDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_runtime_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, template_url=self.get_runtime.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRuntimeDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) 
return deserialized get_runtime.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}'} # type: ignore @distributed_trace_async async def delete_runtime( self, subscription_id: str, resource_group_name: str, workspace_name: str, runtime_name: str, async_call: Optional[bool] = False, msi_token: Optional[bool] = False, **kwargs: Any ) -> "_models.FlowRuntimeDto": """delete_runtime. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param runtime_name: :type runtime_name: str :param async_call: :type async_call: bool :param msi_token: :type msi_token: bool :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRuntimeDto, or the result of cls(response) :rtype: ~flow.models.FlowRuntimeDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRuntimeDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_delete_runtime_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, async_call=async_call, msi_token=msi_token, template_url=self.delete_runtime.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('FlowRuntimeDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_runtime.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}'} # type: ignore @distributed_trace_async async def check_ci_availability( self, subscription_id: str, resource_group_name: str, workspace_name: str, compute_instance_name: str, custom_app_name: str, **kwargs: Any ) -> "_models.AvailabilityResponse": """check_ci_availability. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param compute_instance_name: :type compute_instance_name: str :param custom_app_name: :type custom_app_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: AvailabilityResponse, or the result of cls(response) :rtype: ~flow.models.AvailabilityResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailabilityResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_check_ci_availability_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, compute_instance_name=compute_instance_name, custom_app_name=custom_app_name, template_url=self.check_ci_availability.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('AvailabilityResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized check_ci_availability.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/checkCiAvailability'} # type: ignore @distributed_trace_async async def check_mir_availability( self, subscription_id: str, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any ) -> 
"_models.AvailabilityResponse": """check_mir_availability. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param endpoint_name: :type endpoint_name: str :param deployment_name: :type deployment_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: AvailabilityResponse, or the result of cls(response) :rtype: ~flow.models.AvailabilityResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailabilityResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_check_mir_availability_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, template_url=self.check_mir_availability.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('AvailabilityResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized check_mir_availability.metadata = {'url': 
'/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/checkMirAvailability'} # type: ignore @distributed_trace_async async def check_runtime_upgrade( self, subscription_id: str, resource_group_name: str, workspace_name: str, runtime_name: str, **kwargs: Any ) -> bool: """check_runtime_upgrade. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param runtime_name: :type runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: bool, or the result of cls(response) :rtype: bool :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[bool] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_check_runtime_upgrade_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, template_url=self.check_runtime_upgrade.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('bool', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized 
check_runtime_upgrade.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}/needUpgrade'} # type: ignore @distributed_trace_async async def get_runtime_capability( self, subscription_id: str, resource_group_name: str, workspace_name: str, runtime_name: str, **kwargs: Any ) -> "_models.FlowRuntimeCapability": """get_runtime_capability. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param runtime_name: :type runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FlowRuntimeCapability, or the result of cls(response) :rtype: ~flow.models.FlowRuntimeCapability :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRuntimeCapability"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_runtime_capability_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, runtime_name=runtime_name, template_url=self.get_runtime_capability.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) 
deserialized = self._deserialize('FlowRuntimeCapability', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_runtime_capability.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/{runtimeName}/capability'} # type: ignore @distributed_trace_async async def get_runtime_latest_config( self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> "_models.RuntimeConfiguration": """get_runtime_latest_config. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: RuntimeConfiguration, or the result of cls(response) :rtype: ~flow.models.RuntimeConfiguration :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.RuntimeConfiguration"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_runtime_latest_config_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.get_runtime_latest_config.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, 
pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('RuntimeConfiguration', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_runtime_latest_config.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes/latestConfig'} # type: ignore @distributed_trace_async async def list_runtimes( self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> List["_models.FlowRuntimeDto"]: """list_runtimes. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of FlowRuntimeDto, or the result of cls(response) :rtype: list[~flow.models.FlowRuntimeDto] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.FlowRuntimeDto"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_runtimes_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.list_runtimes.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[FlowRuntimeDto]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list_runtimes.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowRuntimes'} # type: ignore
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/__init__.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from ._bulk_runs_operations import BulkRunsOperations from ._connection_operations import ConnectionOperations from ._connections_operations import ConnectionsOperations from ._flow_runs_admin_operations import FlowRunsAdminOperations from ._flow_runtimes_operations import FlowRuntimesOperations from ._flow_runtimes_workspace_independent_operations import FlowRuntimesWorkspaceIndependentOperations from ._flows_operations import FlowsOperations from ._flow_sessions_operations import FlowSessionsOperations from ._flows_provider_operations import FlowsProviderOperations from ._tools_operations import ToolsOperations __all__ = [ 'BulkRunsOperations', 'ConnectionOperations', 'ConnectionsOperations', 'FlowRunsAdminOperations', 'FlowRuntimesOperations', 'FlowRuntimesWorkspaceIndependentOperations', 'FlowsOperations', 'FlowSessionsOperations', 'FlowsProviderOperations', 'ToolsOperations', ]
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_connections_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from ... import models as _models from ..._vendor import _convert_request from ...operations._connections_operations import build_create_connection_request, build_delete_connection_request, build_get_connection_request, build_get_connection_with_secrets_request, build_list_azure_open_ai_deployments_request, build_list_connection_specs_request, build_list_connections_request, build_update_connection_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class ConnectionsOperations: """ConnectionsOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~flow.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. 
""" models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace_async async def create_connection( self, subscription_id: str, resource_group_name: str, workspace_name: str, connection_name: str, body: Optional["_models.CreateOrUpdateConnectionRequestDto"] = None, **kwargs: Any ) -> "_models.ConnectionDto": """create_connection. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param connection_name: :type connection_name: str :param body: :type body: ~flow.models.CreateOrUpdateConnectionRequestDto :keyword callable cls: A custom type or function that will be passed the direct response :return: ConnectionDto, or the result of cls(response) :rtype: ~flow.models.ConnectionDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateOrUpdateConnectionRequestDto') else: _json = None request = build_create_connection_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, content_type=content_type, json=_json, template_url=self.create_connection.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = 
pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ConnectionDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}'} # type: ignore @distributed_trace_async async def update_connection( self, subscription_id: str, resource_group_name: str, workspace_name: str, connection_name: str, body: Optional["_models.CreateOrUpdateConnectionRequestDto"] = None, **kwargs: Any ) -> "_models.ConnectionDto": """update_connection. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param connection_name: :type connection_name: str :param body: :type body: ~flow.models.CreateOrUpdateConnectionRequestDto :keyword callable cls: A custom type or function that will be passed the direct response :return: ConnectionDto, or the result of cls(response) :rtype: ~flow.models.ConnectionDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] if body is not None: _json = self._serialize.body(body, 'CreateOrUpdateConnectionRequestDto') else: _json = None request = build_update_connection_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, content_type=content_type, json=_json, template_url=self.update_connection.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ConnectionDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}'} # type: ignore @distributed_trace_async async def get_connection( self, 
subscription_id: str, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any ) -> "_models.ConnectionDto": """get_connection. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param connection_name: :type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ConnectionDto, or the result of cls(response) :rtype: ~flow.models.ConnectionDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_connection_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, template_url=self.get_connection.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ConnectionDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_connection.metadata = {'url': 
'/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}'} # type: ignore @distributed_trace_async async def delete_connection( self, subscription_id: str, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any ) -> "_models.ConnectionDto": """delete_connection. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param connection_name: :type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ConnectionDto, or the result of cls(response) :rtype: ~flow.models.ConnectionDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_delete_connection_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, template_url=self.delete_connection.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ConnectionDto', pipeline_response) if cls: return 
cls(pipeline_response, deserialized, {}) return deserialized delete_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}'} # type: ignore @distributed_trace_async async def get_connection_with_secrets( self, subscription_id: str, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any ) -> "_models.ConnectionDto": """get_connection_with_secrets. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param connection_name: :type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ConnectionDto, or the result of cls(response) :rtype: ~flow.models.ConnectionDto :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionDto"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_connection_with_secrets_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, template_url=self.get_connection_with_secrets.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise 
HttpResponseError(response=response, model=error) deserialized = self._deserialize('ConnectionDto', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_connection_with_secrets.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}/listsecrets'} # type: ignore @distributed_trace_async async def list_connections( self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> List["_models.ConnectionDto"]: """list_connections. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of ConnectionDto, or the result of cls(response) :rtype: list[~flow.models.ConnectionDto] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.ConnectionDto"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_connections_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.list_connections.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[ConnectionDto]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list_connections.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections'} # type: ignore @distributed_trace_async async def list_connection_specs( self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> List["_models.WorkspaceConnectionSpec"]: """list_connection_specs. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of WorkspaceConnectionSpec, or the result of cls(response) :rtype: list[~flow.models.WorkspaceConnectionSpec] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.WorkspaceConnectionSpec"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_connection_specs_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, template_url=self.list_connection_specs.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: 
map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[WorkspaceConnectionSpec]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list_connection_specs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/specs'} # type: ignore @distributed_trace_async async def list_azure_open_ai_deployments( self, subscription_id: str, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any ) -> List["_models.AzureOpenAIDeploymentDto"]: """list_azure_open_ai_deployments. :param subscription_id: The Azure Subscription ID. :type subscription_id: str :param resource_group_name: The Name of the resource group in which the workspace is located. :type resource_group_name: str :param workspace_name: The name of the workspace. 
:type workspace_name: str :param connection_name: :type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: list of AzureOpenAIDeploymentDto, or the result of cls(response) :rtype: list[~flow.models.AzureOpenAIDeploymentDto] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["_models.AzureOpenAIDeploymentDto"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_list_azure_open_ai_deployments_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, template_url=self.list_azure_open_ai_deployments.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('[AzureOpenAIDeploymentDto]', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list_azure_open_ai_deployments.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}/AzureOpenAIDeployments'} # type: ignore
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/models/_azure_machine_learning_designer_service_client_enums.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from enum import Enum from six import with_metaclass from azure.core import CaseInsensitiveEnumMeta class ActionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): SEND_VALIDATION_REQUEST = "SendValidationRequest" GET_VALIDATION_STATUS = "GetValidationStatus" SUBMIT_BULK_RUN = "SubmitBulkRun" LOG_RUN_RESULT = "LogRunResult" LOG_RUN_TERMINATED_EVENT = "LogRunTerminatedEvent" class AetherArgumentValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): LITERAL = "Literal" PARAMETER = "Parameter" INPUT = "Input" OUTPUT = "Output" NESTED_LIST = "NestedList" STRING_INTERPOLATION_LIST = "StringInterpolationList" class AetherAssetType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): URI_FILE = "UriFile" URI_FOLDER = "UriFolder" ML_TABLE = "MLTable" CUSTOM_MODEL = "CustomModel" ML_FLOW_MODEL = "MLFlowModel" TRITON_MODEL = "TritonModel" OPEN_AI_MODEL = "OpenAIModel" class AetherBuildSourceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): CLOUD_BUILD = "CloudBuild" VSO = "Vso" VSO_GIT = "VsoGit" class AetherComputeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): BATCH_AI = "BatchAi" MLC = "MLC" HDI_CLUSTER = "HdiCluster" REMOTE_DOCKER = "RemoteDocker" DATABRICKS = "Databricks" AISC = "Aisc" class AetherControlFlowType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" DO_WHILE = "DoWhile" PARALLEL_FOR = "ParallelFor" class AetherControlInputValue(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" FALSE = "False" TRUE = "True" SKIPPED = "Skipped" class AetherDataCopyMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): MERGE_WITH_OVERWRITE = 
"MergeWithOverwrite" FAIL_IF_CONFLICT = "FailIfConflict" class AetherDataLocationStorageType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): COSMOS = "Cosmos" AZURE_BLOB = "AzureBlob" ARTIFACT = "Artifact" SNAPSHOT = "Snapshot" SAVED_AML_DATASET = "SavedAmlDataset" ASSET = "Asset" class AetherDataReferenceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" AZURE_BLOB = "AzureBlob" AZURE_DATA_LAKE = "AzureDataLake" AZURE_FILES = "AzureFiles" COSMOS = "Cosmos" PHILLY_HDFS = "PhillyHdfs" AZURE_SQL_DATABASE = "AzureSqlDatabase" AZURE_POSTGRES_DATABASE = "AzurePostgresDatabase" AZURE_DATA_LAKE_GEN2 = "AzureDataLakeGen2" DBFS = "DBFS" AZURE_MY_SQL_DATABASE = "AzureMySqlDatabase" CUSTOM = "Custom" HDFS = "Hdfs" class AetherDatasetType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): FILE = "File" TABULAR = "Tabular" class AetherDataStoreMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" MOUNT = "Mount" DOWNLOAD = "Download" UPLOAD = "Upload" DIRECT = "Direct" HDFS = "Hdfs" LINK = "Link" class AetherDataTransferStorageType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): DATA_BASE = "DataBase" FILE_SYSTEM = "FileSystem" class AetherDataTransferTaskType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): IMPORT_DATA = "ImportData" EXPORT_DATA = "ExportData" COPY_DATA = "CopyData" class AetherEarlyTerminationPolicyType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): BANDIT = "Bandit" MEDIAN_STOPPING = "MedianStopping" TRUNCATION_SELECTION = "TruncationSelection" class AetherEntityStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): ACTIVE = "Active" DEPRECATED = "Deprecated" DISABLED = "Disabled" class AetherExecutionEnvironment(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): EXE_WORKER_MACHINE = "ExeWorkerMachine" DOCKER_CONTAINER_WITHOUT_NETWORK = "DockerContainerWithoutNetwork" DOCKER_CONTAINER_WITH_NETWORK = "DockerContainerWithNetwork" HYPER_V_WITHOUT_NETWORK = "HyperVWithoutNetwork" 
HYPER_V_WITH_NETWORK = "HyperVWithNetwork" class AetherExecutionPhase(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): EXECUTION = "Execution" INITIALIZATION = "Initialization" FINALIZATION = "Finalization" class AetherFeaturizationMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): AUTO = "Auto" CUSTOM = "Custom" OFF = "Off" class AetherFileBasedPathType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): UNKNOWN = "Unknown" FILE = "File" FOLDER = "Folder" class AetherForecastHorizonMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): AUTO = "Auto" CUSTOM = "Custom" class AetherIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): USER_IDENTITY = "UserIdentity" MANAGED = "Managed" AML_TOKEN = "AMLToken" class AetherLogVerbosity(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NOT_SET = "NotSet" DEBUG = "Debug" INFO = "Info" WARNING = "Warning" ERROR = "Error" CRITICAL = "Critical" class AetherModuleDeploymentSource(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): CLIENT = "Client" AUTO_DEPLOYMENT = "AutoDeployment" VSTS = "Vsts" class AetherModuleHashVersion(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): IDENTIFIER_HASH = "IdentifierHash" IDENTIFIER_HASH_V2 = "IdentifierHashV2" class AetherModuleType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" BATCH_INFERENCING = "BatchInferencing" class AetherNCrossValidationMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): AUTO = "Auto" CUSTOM = "Custom" class AetherParameterType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): INT = "Int" DOUBLE = "Double" BOOL = "Bool" STRING = "String" UNDEFINED = "Undefined" class AetherParameterValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): LITERAL = "Literal" GRAPH_PARAMETER_NAME = "GraphParameterName" CONCATENATE = "Concatenate" INPUT = "Input" DATA_PATH = "DataPath" DATA_SET_DEFINITION = "DataSetDefinition" class AetherPrimaryMetrics(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): 
AUC_WEIGHTED = "AUCWeighted" ACCURACY = "Accuracy" NORM_MACRO_RECALL = "NormMacroRecall" AVERAGE_PRECISION_SCORE_WEIGHTED = "AveragePrecisionScoreWeighted" PRECISION_SCORE_WEIGHTED = "PrecisionScoreWeighted" SPEARMAN_CORRELATION = "SpearmanCorrelation" NORMALIZED_ROOT_MEAN_SQUARED_ERROR = "NormalizedRootMeanSquaredError" R2_SCORE = "R2Score" NORMALIZED_MEAN_ABSOLUTE_ERROR = "NormalizedMeanAbsoluteError" NORMALIZED_ROOT_MEAN_SQUARED_LOG_ERROR = "NormalizedRootMeanSquaredLogError" MEAN_AVERAGE_PRECISION = "MeanAveragePrecision" IOU = "Iou" class AetherRepositoryType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" OTHER = "Other" GIT = "Git" SOURCE_DEPOT = "SourceDepot" COSMOS = "Cosmos" class AetherResourceOperator(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): EQUAL = "Equal" CONTAIN = "Contain" GREATER_OR_EQUAL = "GreaterOrEqual" class AetherResourceValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): STRING = "String" DOUBLE = "Double" class AetherSamplingAlgorithmType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): RANDOM = "Random" GRID = "Grid" BAYESIAN = "Bayesian" class AetherSeasonalityMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): AUTO = "Auto" CUSTOM = "Custom" class AetherShortSeriesHandlingConfiguration(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): AUTO = "Auto" PAD = "Pad" DROP = "Drop" class AetherStackMetaLearnerType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" LOGISTIC_REGRESSION = "LogisticRegression" LOGISTIC_REGRESSION_CV = "LogisticRegressionCV" LIGHT_GBM_CLASSIFIER = "LightGBMClassifier" ELASTIC_NET = "ElasticNet" ELASTIC_NET_CV = "ElasticNetCV" LIGHT_GBM_REGRESSOR = "LightGBMRegressor" LINEAR_REGRESSION = "LinearRegression" class AetherStoredProcedureParameterType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): STRING = "String" INT = "Int" DECIMAL = "Decimal" GUID = "Guid" BOOLEAN = "Boolean" DATE = "Date" class 
AetherTabularTrainingMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): DISTRIBUTED = "Distributed" NON_DISTRIBUTED = "NonDistributed" AUTO = "Auto" class AetherTargetAggregationFunction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): SUM = "Sum" MAX = "Max" MIN = "Min" MEAN = "Mean" class AetherTargetLagsMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): AUTO = "Auto" CUSTOM = "Custom" class AetherTargetRollingWindowSizeMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): AUTO = "Auto" CUSTOM = "Custom" class AetherTaskType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): CLASSIFICATION = "Classification" REGRESSION = "Regression" FORECASTING = "Forecasting" IMAGE_CLASSIFICATION = "ImageClassification" IMAGE_CLASSIFICATION_MULTILABEL = "ImageClassificationMultilabel" IMAGE_OBJECT_DETECTION = "ImageObjectDetection" IMAGE_INSTANCE_SEGMENTATION = "ImageInstanceSegmentation" TEXT_CLASSIFICATION = "TextClassification" TEXT_MULTI_LABELING = "TextMultiLabeling" TEXT_NER = "TextNER" TEXT_CLASSIFICATION_MULTILABEL = "TextClassificationMultilabel" class AetherTrainingOutputType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): METRICS = "Metrics" MODEL = "Model" class AetherUIScriptLanguageEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" PYTHON = "Python" R = "R" JSON = "Json" SQL = "Sql" class AetherUIWidgetTypeEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): DEFAULT = "Default" MODE = "Mode" COLUMN_PICKER = "ColumnPicker" CREDENTIAL = "Credential" SCRIPT = "Script" COMPUTE_SELECTION = "ComputeSelection" JSON_EDITOR = "JsonEditor" SEARCH_SPACE_PARAMETER = "SearchSpaceParameter" SECTION_TOGGLE = "SectionToggle" YAML_EDITOR = "YamlEditor" ENABLE_RUNTIME_SWEEP = "EnableRuntimeSweep" DATA_STORE_SELECTION = "DataStoreSelection" INSTANCE_TYPE_SELECTION = "InstanceTypeSelection" CONNECTION_SELECTION = "ConnectionSelection" PROMPT_FLOW_CONNECTION_SELECTION = "PromptFlowConnectionSelection" 
AZURE_OPEN_AI_DEPLOYMENT_NAME_SELECTION = "AzureOpenAIDeploymentNameSelection" class AetherUploadState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): UPLOADING = "Uploading" COMPLETED = "Completed" CANCELED = "Canceled" FAILED = "Failed" class AetherUseStl(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): SEASON = "Season" SEASON_TREND = "SeasonTrend" class AEVAAssetType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): URI_FILE = "UriFile" URI_FOLDER = "UriFolder" ML_TABLE = "MLTable" CUSTOM_MODEL = "CustomModel" ML_FLOW_MODEL = "MLFlowModel" TRITON_MODEL = "TritonModel" OPEN_AI_MODEL = "OpenAIModel" class AEVADataStoreMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" MOUNT = "Mount" DOWNLOAD = "Download" UPLOAD = "Upload" DIRECT = "Direct" HDFS = "Hdfs" LINK = "Link" class AEVAIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): USER_IDENTITY = "UserIdentity" MANAGED = "Managed" AML_TOKEN = "AMLToken" class ApplicationEndpointType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): JUPYTER = "Jupyter" JUPYTER_LAB = "JupyterLab" SSH = "SSH" TENSOR_BOARD = "TensorBoard" VS_CODE = "VSCode" THEIA = "Theia" GRAFANA = "Grafana" CUSTOM = "Custom" RAY_DASHBOARD = "RayDashboard" class ArgumentValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): LITERAL = "Literal" PARAMETER = "Parameter" INPUT = "Input" OUTPUT = "Output" NESTED_LIST = "NestedList" STRING_INTERPOLATION_LIST = "StringInterpolationList" class AssetScopeTypes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): WORKSPACE = "Workspace" GLOBAL_ENUM = "Global" ALL = "All" FEED = "Feed" class AssetSourceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): UNKNOWN = "Unknown" LOCAL = "Local" GITHUB_FILE = "GithubFile" GITHUB_FOLDER = "GithubFolder" DEVOPS_ARTIFACTS_ZIP = "DevopsArtifactsZip" class AssetType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): COMPONENT = "Component" MODEL = "Model" ENVIRONMENT = "Environment" DATASET = "Dataset" 
DATA_STORE = "DataStore" SAMPLE_GRAPH = "SampleGraph" FLOW_TOOL = "FlowTool" FLOW_TOOL_SETTING = "FlowToolSetting" FLOW_CONNECTION = "FlowConnection" FLOW_SAMPLE = "FlowSample" FLOW_RUNTIME_SPEC = "FlowRuntimeSpec" class AutoDeleteCondition(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): CREATED_GREATER_THAN = "CreatedGreaterThan" LAST_ACCESSED_GREATER_THAN = "LastAccessedGreaterThan" class BuildContextLocationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): GIT = "Git" STORAGE_ACCOUNT = "StorageAccount" class Communicator(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" PARAMETER_SERVER = "ParameterServer" GLOO = "Gloo" MPI = "Mpi" NCCL = "Nccl" PARALLEL_TASK = "ParallelTask" class ComponentRegistrationTypeEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NORMAL = "Normal" ANONYMOUS_AML_MODULE = "AnonymousAmlModule" ANONYMOUS_AML_MODULE_VERSION = "AnonymousAmlModuleVersion" MODULE_ENTITY_ONLY = "ModuleEntityOnly" class ComponentType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): UNKNOWN = "Unknown" COMMAND_COMPONENT = "CommandComponent" COMMAND = "Command" class ComputeEnvironmentType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): ACI = "ACI" AKS = "AKS" AMLCOMPUTE = "AMLCOMPUTE" IOT = "IOT" AKSENDPOINT = "AKSENDPOINT" MIRSINGLEMODEL = "MIRSINGLEMODEL" MIRAMLCOMPUTE = "MIRAMLCOMPUTE" MIRGA = "MIRGA" AMLARC = "AMLARC" BATCHAMLCOMPUTE = "BATCHAMLCOMPUTE" UNKNOWN = "UNKNOWN" class ComputeTargetType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): LOCAL = "Local" REMOTE = "Remote" HDI_CLUSTER = "HdiCluster" CONTAINER_INSTANCE = "ContainerInstance" AML_COMPUTE = "AmlCompute" COMPUTE_INSTANCE = "ComputeInstance" CMK8_S = "Cmk8s" SYNAPSE_SPARK = "SynapseSpark" KUBERNETES = "Kubernetes" AISC = "Aisc" GLOBAL_JOB_DISPATCHER = "GlobalJobDispatcher" DATABRICKS = "Databricks" MOCKED_COMPUTE = "MockedCompute" class ComputeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): BATCH_AI = "BatchAi" MLC = "MLC" HDI_CLUSTER 
= "HdiCluster" REMOTE_DOCKER = "RemoteDocker" DATABRICKS = "Databricks" AISC = "Aisc" class ConfigValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): STRING = "String" SECRET = "Secret" class ConnectionCategory(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): PYTHON_FEED = "PythonFeed" ACR = "ACR" GIT = "Git" S3 = "S3" SNOWFLAKE = "Snowflake" AZURE_SQL_DB = "AzureSqlDb" AZURE_SYNAPSE_ANALYTICS = "AzureSynapseAnalytics" AZURE_MY_SQL_DB = "AzureMySqlDb" AZURE_POSTGRES_DB = "AzurePostgresDb" AZURE_DATA_LAKE_GEN2 = "AzureDataLakeGen2" REDIS = "Redis" API_KEY = "ApiKey" AZURE_OPEN_AI = "AzureOpenAI" COGNITIVE_SEARCH = "CognitiveSearch" COGNITIVE_SERVICE = "CognitiveService" CUSTOM_KEYS = "CustomKeys" AZURE_BLOB = "AzureBlob" AZURE_ONE_LAKE = "AzureOneLake" COSMOS_DB = "CosmosDb" COSMOS_DB_MONGO_DB_API = "CosmosDbMongoDbApi" AZURE_DATA_EXPLORER = "AzureDataExplorer" AZURE_MARIA_DB = "AzureMariaDb" AZURE_DATABRICKS_DELTA_LAKE = "AzureDatabricksDeltaLake" AZURE_SQL_MI = "AzureSqlMi" AZURE_TABLE_STORAGE = "AzureTableStorage" AMAZON_RDS_FOR_ORACLE = "AmazonRdsForOracle" AMAZON_RDS_FOR_SQL_SERVER = "AmazonRdsForSqlServer" AMAZON_REDSHIFT = "AmazonRedshift" DB2 = "Db2" DRILL = "Drill" GOOGLE_BIG_QUERY = "GoogleBigQuery" GREENPLUM = "Greenplum" HBASE = "Hbase" HIVE = "Hive" IMPALA = "Impala" INFORMIX = "Informix" MARIA_DB = "MariaDb" MICROSOFT_ACCESS = "MicrosoftAccess" MY_SQL = "MySql" NETEZZA = "Netezza" ORACLE = "Oracle" PHOENIX = "Phoenix" POSTGRE_SQL = "PostgreSql" PRESTO = "Presto" SAP_OPEN_HUB = "SapOpenHub" SAP_BW = "SapBw" SAP_HANA = "SapHana" SAP_TABLE = "SapTable" SPARK = "Spark" SQL_SERVER = "SqlServer" SYBASE = "Sybase" TERADATA = "Teradata" VERTICA = "Vertica" CASSANDRA = "Cassandra" COUCHBASE = "Couchbase" MONGO_DB_V2 = "MongoDbV2" MONGO_DB_ATLAS = "MongoDbAtlas" AMAZON_S3_COMPATIBLE = "AmazonS3Compatible" FILE_SERVER = "FileServer" FTP_SERVER = "FtpServer" GOOGLE_CLOUD_STORAGE = "GoogleCloudStorage" HDFS = "Hdfs" ORACLE_CLOUD_STORAGE = 
"OracleCloudStorage" SFTP = "Sftp" GENERIC_HTTP = "GenericHttp" O_DATA_REST = "ODataRest" ODBC = "Odbc" GENERIC_REST = "GenericRest" AMAZON_MWS = "AmazonMws" CONCUR = "Concur" DYNAMICS = "Dynamics" DYNAMICS_AX = "DynamicsAx" DYNAMICS_CRM = "DynamicsCrm" GOOGLE_AD_WORDS = "GoogleAdWords" HUBSPOT = "Hubspot" JIRA = "Jira" MAGENTO = "Magento" MARKETO = "Marketo" OFFICE365 = "Office365" ELOQUA = "Eloqua" RESPONSYS = "Responsys" ORACLE_SERVICE_CLOUD = "OracleServiceCloud" PAY_PAL = "PayPal" QUICK_BOOKS = "QuickBooks" SALESFORCE = "Salesforce" SALESFORCE_SERVICE_CLOUD = "SalesforceServiceCloud" SALESFORCE_MARKETING_CLOUD = "SalesforceMarketingCloud" SAP_CLOUD_FOR_CUSTOMER = "SapCloudForCustomer" SAP_ECC = "SapEcc" SERVICE_NOW = "ServiceNow" SHARE_POINT_ONLINE_LIST = "SharePointOnlineList" SHOPIFY = "Shopify" SQUARE = "Square" WEB_TABLE = "WebTable" XERO = "Xero" ZOHO = "Zoho" GENERIC_CONTAINER_REGISTRY = "GenericContainerRegistry" class ConnectionScope(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): USER = "User" WORKSPACE_SHARED = "WorkspaceShared" class ConnectionSourceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NODE = "Node" NODE_INPUT = "NodeInput" class ConnectionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): OPEN_AI = "OpenAI" AZURE_OPEN_AI = "AzureOpenAI" SERP = "Serp" BING = "Bing" AZURE_CONTENT_MODERATOR = "AzureContentModerator" CUSTOM = "Custom" AZURE_CONTENT_SAFETY = "AzureContentSafety" COGNITIVE_SEARCH = "CognitiveSearch" SUBSTRATE_LLM = "SubstrateLLM" PINECONE = "Pinecone" QDRANT = "Qdrant" WEAVIATE = "Weaviate" FORM_RECOGNIZER = "FormRecognizer" class ConsumeMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): REFERENCE = "Reference" COPY = "Copy" COPY_AND_AUTO_UPGRADE = "CopyAndAutoUpgrade" class ControlFlowType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" DO_WHILE = "DoWhile" PARALLEL_FOR = "ParallelFor" class ControlInputValue(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" 
FALSE = "False" TRUE = "True" SKIPPED = "Skipped" class DataBindingMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): MOUNT = "Mount" DOWNLOAD = "Download" UPLOAD = "Upload" READ_ONLY_MOUNT = "ReadOnlyMount" READ_WRITE_MOUNT = "ReadWriteMount" DIRECT = "Direct" EVAL_MOUNT = "EvalMount" EVAL_DOWNLOAD = "EvalDownload" class DataCategory(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): ALL = "All" DATASET = "Dataset" MODEL = "Model" class DataCopyMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): MERGE_WITH_OVERWRITE = "MergeWithOverwrite" FAIL_IF_CONFLICT = "FailIfConflict" class DataLocationStorageType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" AZURE_BLOB = "AzureBlob" ARTIFACT = "Artifact" SNAPSHOT = "Snapshot" SAVED_AML_DATASET = "SavedAmlDataset" ASSET = "Asset" class DataPortType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): INPUT = "Input" OUTPUT = "Output" class DataReferenceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" AZURE_BLOB = "AzureBlob" AZURE_DATA_LAKE = "AzureDataLake" AZURE_FILES = "AzureFiles" AZURE_SQL_DATABASE = "AzureSqlDatabase" AZURE_POSTGRES_DATABASE = "AzurePostgresDatabase" AZURE_DATA_LAKE_GEN2 = "AzureDataLakeGen2" DBFS = "DBFS" AZURE_MY_SQL_DATABASE = "AzureMySqlDatabase" CUSTOM = "Custom" HDFS = "Hdfs" class DatasetAccessModes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): DEFAULT = "Default" DATASET_IN_DPV2 = "DatasetInDpv2" ASSET_IN_DPV2 = "AssetInDpv2" DATASET_IN_DESIGNER_UI = "DatasetInDesignerUI" DATASET_IN_DPV2_WITH_DATASET_IN_DESIGNER_UI = "DatasetInDpv2WithDatasetInDesignerUI" DATASET = "Dataset" ASSET_IN_DPV2_WITH_DATASET_IN_DESIGNER_UI = "AssetInDpv2WithDatasetInDesignerUI" DATASET_AND_ASSET_IN_DPV2_WITH_DATASET_IN_DESIGNER_UI = "DatasetAndAssetInDpv2WithDatasetInDesignerUI" ASSET_IN_DESIGNER_UI = "AssetInDesignerUI" ASSET_IN_DPV2_WITH_ASSET_IN_DESIGNER_UI = "AssetInDpv2WithAssetInDesignerUI" ASSET = "Asset" class 
DatasetConsumptionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): RUN_INPUT = "RunInput" REFERENCE = "Reference" class DatasetDeliveryMechanism(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): DIRECT = "Direct" MOUNT = "Mount" DOWNLOAD = "Download" HDFS = "Hdfs" class DatasetOutputType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): RUN_OUTPUT = "RunOutput" REFERENCE = "Reference" class DatasetType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): FILE = "File" TABULAR = "Tabular" class DataSourceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" PIPELINE_DATA_SOURCE = "PipelineDataSource" AML_DATASET = "AmlDataset" GLOBAL_DATASET = "GlobalDataset" FEED_MODEL = "FeedModel" FEED_DATASET = "FeedDataset" AML_DATA_VERSION = "AmlDataVersion" AML_MODEL_VERSION = "AMLModelVersion" class DataStoreMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): MOUNT = "Mount" DOWNLOAD = "Download" UPLOAD = "Upload" class DataTransferStorageType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): DATA_BASE = "DataBase" FILE_SYSTEM = "FileSystem" class DataTransferTaskType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): IMPORT_DATA = "ImportData" EXPORT_DATA = "ExportData" COPY_DATA = "CopyData" class DataTypeMechanism(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): ERROR_WHEN_NOT_EXISTING = "ErrorWhenNotExisting" REGISTER_WHEN_NOT_EXISTING = "RegisterWhenNotExisting" REGISTER_BUILDIN_DATA_TYPE_ONLY = "RegisterBuildinDataTypeOnly" class DeliveryMechanism(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): DIRECT = "Direct" MOUNT = "Mount" DOWNLOAD = "Download" HDFS = "Hdfs" class DistributionParameterEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): TEXT = "Text" NUMBER = "Number" class DistributionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): PY_TORCH = "PyTorch" TENSOR_FLOW = "TensorFlow" MPI = "Mpi" RAY = "Ray" class EarlyTerminationPolicyType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): BANDIT = 
"Bandit" MEDIAN_STOPPING = "MedianStopping" TRUNCATION_SELECTION = "TruncationSelection" class EmailNotificationEnableType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): JOB_COMPLETED = "JobCompleted" JOB_FAILED = "JobFailed" JOB_CANCELLED = "JobCancelled" class EndpointAuthMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): AML_TOKEN = "AMLToken" KEY = "Key" AAD_TOKEN = "AADToken" class EntityKind(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): INVALID = "Invalid" LINEAGE_ROOT = "LineageRoot" VERSIONED = "Versioned" UNVERSIONED = "Unversioned" class EntityStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): ACTIVE = "Active" DEPRECATED = "Deprecated" DISABLED = "Disabled" class ErrorHandlingMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): DEFAULT_INTERPOLATION = "DefaultInterpolation" CUSTOMER_FACING_INTERPOLATION = "CustomerFacingInterpolation" class ExecutionPhase(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): EXECUTION = "Execution" INITIALIZATION = "Initialization" FINALIZATION = "Finalization" class FeaturizationMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): AUTO = "Auto" CUSTOM = "Custom" OFF = "Off" class FlowFeatureStateEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): READY = "Ready" E2_E_TEST = "E2ETest" class FlowLanguage(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): PYTHON = "Python" C_SHARP = "CSharp" class FlowPatchOperationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): ARCHIVE_FLOW = "ArchiveFlow" RESTORE_FLOW = "RestoreFlow" EXPORT_FLOW_TO_FILE = "ExportFlowToFile" class FlowRunMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): FLOW = "Flow" SINGLE_NODE = "SingleNode" FROM_NODE = "FromNode" BULK_TEST = "BulkTest" EVAL = "Eval" PAIRWISE_EVAL = "PairwiseEval" class FlowRuntimeSubmissionApiVersion(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): VERSION1 = "Version1" VERSION2 = "Version2" class FlowRunTypeEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): 
FLOW_RUN = "FlowRun" EVALUATION_RUN = "EvaluationRun" PAIRWISE_EVALUATION_RUN = "PairwiseEvaluationRun" SINGLE_NODE_RUN = "SingleNodeRun" FROM_NODE_RUN = "FromNodeRun" class FlowTestMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): SYNC = "Sync" ASYNC_ENUM = "Async" class FlowType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): DEFAULT = "Default" EVALUATION = "Evaluation" CHAT = "Chat" RAG = "Rag" class ForecastHorizonMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): AUTO = "Auto" CUSTOM = "Custom" class Framework(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): PYTHON = "Python" PY_SPARK = "PySpark" CNTK = "Cntk" TENSOR_FLOW = "TensorFlow" PY_TORCH = "PyTorch" PY_SPARK_INTERACTIVE = "PySparkInteractive" R = "R" class Frequency(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): MONTH = "Month" WEEK = "Week" DAY = "Day" HOUR = "Hour" MINUTE = "Minute" class GlobalJobDispatcherSupportedComputeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): AML_COMPUTE = "AmlCompute" AML_K8_S = "AmlK8s" class GraphComponentsMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NORMAL = "Normal" ALL_DESIGNER_BUILDIN = "AllDesignerBuildin" CONTAINS_DESIGNER_BUILDIN = "ContainsDesignerBuildin" class GraphDatasetsLoadModes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): SKIP_DATASETS_LOAD = "SkipDatasetsLoad" V1_REGISTERED_DATASET = "V1RegisteredDataset" V1_SAVED_DATASET = "V1SavedDataset" PERSIST_DATASETS_INFO = "PersistDatasetsInfo" SUBMISSION_NEEDED_UPSTREAM_DATASET_ONLY = "SubmissionNeededUpstreamDatasetOnly" SUBMISSION_NEEDED_IN_COMPLETE_DATASET_ONLY = "SubmissionNeededInCompleteDatasetOnly" V2_ASSET = "V2Asset" SUBMISSION = "Submission" ALL_REGISTERED_DATA = "AllRegisteredData" ALL_DATA = "AllData" class GraphSdkCodeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): PYTHON = "Python" JUPYTER_NOTEBOOK = "JupyterNotebook" UNKNOWN = "Unknown" class HttpStatusCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): CONTINUE_ENUM = 
"Continue" SWITCHING_PROTOCOLS = "SwitchingProtocols" PROCESSING = "Processing" EARLY_HINTS = "EarlyHints" OK = "OK" CREATED = "Created" ACCEPTED = "Accepted" NON_AUTHORITATIVE_INFORMATION = "NonAuthoritativeInformation" NO_CONTENT = "NoContent" RESET_CONTENT = "ResetContent" PARTIAL_CONTENT = "PartialContent" MULTI_STATUS = "MultiStatus" ALREADY_REPORTED = "AlreadyReported" IM_USED = "IMUsed" MULTIPLE_CHOICES = "MultipleChoices" AMBIGUOUS = "Ambiguous" MOVED_PERMANENTLY = "MovedPermanently" MOVED = "Moved" FOUND = "Found" REDIRECT = "Redirect" SEE_OTHER = "SeeOther" REDIRECT_METHOD = "RedirectMethod" NOT_MODIFIED = "NotModified" USE_PROXY = "UseProxy" UNUSED = "Unused" TEMPORARY_REDIRECT = "TemporaryRedirect" REDIRECT_KEEP_VERB = "RedirectKeepVerb" PERMANENT_REDIRECT = "PermanentRedirect" BAD_REQUEST = "BadRequest" UNAUTHORIZED = "Unauthorized" PAYMENT_REQUIRED = "PaymentRequired" FORBIDDEN = "Forbidden" NOT_FOUND = "NotFound" METHOD_NOT_ALLOWED = "MethodNotAllowed" NOT_ACCEPTABLE = "NotAcceptable" PROXY_AUTHENTICATION_REQUIRED = "ProxyAuthenticationRequired" REQUEST_TIMEOUT = "RequestTimeout" CONFLICT = "Conflict" GONE = "Gone" LENGTH_REQUIRED = "LengthRequired" PRECONDITION_FAILED = "PreconditionFailed" REQUEST_ENTITY_TOO_LARGE = "RequestEntityTooLarge" REQUEST_URI_TOO_LONG = "RequestUriTooLong" UNSUPPORTED_MEDIA_TYPE = "UnsupportedMediaType" REQUESTED_RANGE_NOT_SATISFIABLE = "RequestedRangeNotSatisfiable" EXPECTATION_FAILED = "ExpectationFailed" MISDIRECTED_REQUEST = "MisdirectedRequest" UNPROCESSABLE_ENTITY = "UnprocessableEntity" LOCKED = "Locked" FAILED_DEPENDENCY = "FailedDependency" UPGRADE_REQUIRED = "UpgradeRequired" PRECONDITION_REQUIRED = "PreconditionRequired" TOO_MANY_REQUESTS = "TooManyRequests" REQUEST_HEADER_FIELDS_TOO_LARGE = "RequestHeaderFieldsTooLarge" UNAVAILABLE_FOR_LEGAL_REASONS = "UnavailableForLegalReasons" INTERNAL_SERVER_ERROR = "InternalServerError" NOT_IMPLEMENTED = "NotImplemented" BAD_GATEWAY = "BadGateway" SERVICE_UNAVAILABLE = 
"ServiceUnavailable" GATEWAY_TIMEOUT = "GatewayTimeout" HTTP_VERSION_NOT_SUPPORTED = "HttpVersionNotSupported" VARIANT_ALSO_NEGOTIATES = "VariantAlsoNegotiates" INSUFFICIENT_STORAGE = "InsufficientStorage" LOOP_DETECTED = "LoopDetected" NOT_EXTENDED = "NotExtended" NETWORK_AUTHENTICATION_REQUIRED = "NetworkAuthenticationRequired" class IdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): MANAGED = "Managed" SERVICE_PRINCIPAL = "ServicePrincipal" AML_TOKEN = "AMLToken" class InputType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): DEFAULT = "default" UIONLY_HIDDEN = "uionly_hidden" class IntellectualPropertyAccessMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): READ_ONLY = "ReadOnly" READ_WRITE = "ReadWrite" class JobInputType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): DATASET = "Dataset" URI = "Uri" LITERAL = "Literal" URI_FILE = "UriFile" URI_FOLDER = "UriFolder" ML_TABLE = "MLTable" CUSTOM_MODEL = "CustomModel" ML_FLOW_MODEL = "MLFlowModel" TRITON_MODEL = "TritonModel" class JobLimitsType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): COMMAND = "Command" SWEEP = "Sweep" class JobOutputType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): URI = "Uri" DATASET = "Dataset" URI_FILE = "UriFile" URI_FOLDER = "UriFolder" ML_TABLE = "MLTable" CUSTOM_MODEL = "CustomModel" ML_FLOW_MODEL = "MLFlowModel" TRITON_MODEL = "TritonModel" class JobProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): SUCCEEDED = "Succeeded" FAILED = "Failed" CANCELED = "Canceled" IN_PROGRESS = "InProgress" class JobStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NOT_STARTED = "NotStarted" STARTING = "Starting" PROVISIONING = "Provisioning" PREPARING = "Preparing" QUEUED = "Queued" RUNNING = "Running" FINALIZING = "Finalizing" CANCEL_REQUESTED = "CancelRequested" COMPLETED = "Completed" FAILED = "Failed" CANCELED = "Canceled" NOT_RESPONDING = "NotResponding" PAUSED = "Paused" UNKNOWN = "Unknown" SCHEDULED = "Scheduled" 
# Case-insensitive string enums for the Flow service client.
# Member values are the exact wire strings the REST API expects; member
# order is significant for Enum iteration and is preserved as generated.


class JobType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """JobType."""

    COMMAND = "Command"
    SWEEP = "Sweep"
    LABELING = "Labeling"
    PIPELINE = "Pipeline"
    DATA = "Data"
    AUTO_ML = "AutoML"
    SPARK = "Spark"
    BASE = "Base"


class KeyType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """KeyType."""

    PRIMARY = "Primary"
    SECONDARY = "Secondary"


class ListViewType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ListViewType."""

    ACTIVE_ONLY = "ActiveOnly"
    ARCHIVED_ONLY = "ArchivedOnly"
    ALL = "All"


class LogLevel(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """LogLevel."""

    TRACE = "Trace"
    DEBUG = "Debug"
    INFORMATION = "Information"
    WARNING = "Warning"
    ERROR = "Error"
    CRITICAL = "Critical"
    NONE = "None"


class LogVerbosity(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """LogVerbosity."""

    NOT_SET = "NotSet"
    DEBUG = "Debug"
    INFO = "Info"
    WARNING = "Warning"
    ERROR = "Error"
    CRITICAL = "Critical"


class LongRunningUpdateType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """LongRunningUpdateType."""

    ENABLE_MODULE = "EnableModule"
    DISABLE_MODULE = "DisableModule"
    UPDATE_DISPLAY_NAME = "UpdateDisplayName"
    UPDATE_DESCRIPTION = "UpdateDescription"
    UPDATE_TAGS = "UpdateTags"


class ManagedServiceIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ManagedServiceIdentityType."""

    SYSTEM_ASSIGNED = "SystemAssigned"
    USER_ASSIGNED = "UserAssigned"
    SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssignedUserAssigned"
    NONE = "None"


class MetricValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """MetricValueType."""

    INT = "Int"
    DOUBLE = "Double"
    STRING = "String"
    BOOL = "Bool"
    ARTIFACT = "Artifact"
    HISTOGRAM = "Histogram"
    MALFORMED = "Malformed"


class MfeInternalIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """MfeInternalIdentityType."""

    MANAGED = "Managed"
    AML_TOKEN = "AMLToken"
    USER_IDENTITY = "UserIdentity"


class MfeInternalMLFlowAutologgerState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """MfeInternalMLFlowAutologgerState."""

    ENABLED = "Enabled"
    DISABLED = "Disabled"


class MfeInternalScheduleStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """MfeInternalScheduleStatus."""

    ENABLED = "Enabled"
    DISABLED = "Disabled"


class MLFlowAutologgerState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """MLFlowAutologgerState."""

    ENABLED = "Enabled"
    DISABLED = "Disabled"


class ModuleDtoFields(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ModuleDtoFields."""

    DEFINITION = "Definition"
    YAML_STR = "YamlStr"
    REGISTRATION_CONTEXT = "RegistrationContext"
    RUN_SETTING_PARAMETERS = "RunSettingParameters"
    RUN_DEFINITION = "RunDefinition"
    ALL = "All"
    DEFAULT = "Default"
    BASIC = "Basic"
    MINIMAL = "Minimal"


class ModuleInfoFromYamlStatusEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ModuleInfoFromYamlStatusEnum."""

    NEW_MODULE = "NewModule"
    NEW_VERSION = "NewVersion"
    CONFLICT = "Conflict"
    PARSE_ERROR = "ParseError"
    PROCESS_REQUEST_ERROR = "ProcessRequestError"


class ModuleRunSettingTypes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ModuleRunSettingTypes."""

    ALL = "All"
    RELEASED = "Released"
    DEFAULT = "Default"
    TESTING = "Testing"
    LEGACY = "Legacy"
    PREVIEW = "Preview"
    UX_FULL = "UxFull"
    INTEGRATION = "Integration"
    UX_INTEGRATION = "UxIntegration"
    FULL = "Full"


class ModuleScope(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ModuleScope."""

    # GLOBAL_ENUM avoids shadowing the `global` keyword in generated code.
    ALL = "All"
    GLOBAL_ENUM = "Global"
    WORKSPACE = "Workspace"
    ANONYMOUS = "Anonymous"
    STEP = "Step"
    DRAFT = "Draft"
    FEED = "Feed"
    REGISTRY = "Registry"
    SYSTEM_AUTO_CREATED = "SystemAutoCreated"


class ModuleSourceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ModuleSourceType."""

    UNKNOWN = "Unknown"
    LOCAL = "Local"
    GITHUB_FILE = "GithubFile"
    GITHUB_FOLDER = "GithubFolder"
    DEVOPS_ARTIFACTS_ZIP = "DevopsArtifactsZip"
    SERIALIZED_MODULE_INFO = "SerializedModuleInfo"


class ModuleType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ModuleType."""

    NONE = "None"
    BATCH_INFERENCING = "BatchInferencing"


class ModuleUpdateOperationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ModuleUpdateOperationType."""

    SET_DEFAULT_VERSION = "SetDefaultVersion"
    ENABLE_MODULE = "EnableModule"
    DISABLE_MODULE = "DisableModule"
    UPDATE_DISPLAY_NAME = "UpdateDisplayName"
    UPDATE_DESCRIPTION = "UpdateDescription"
    UPDATE_TAGS = "UpdateTags"


class ModuleWorkingMechanism(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ModuleWorkingMechanism."""

    NORMAL = "Normal"
    OUTPUT_TO_DATASET = "OutputToDataset"


class NCrossValidationMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """NCrossValidationMode."""

    AUTO = "Auto"
    CUSTOM = "Custom"


class NodeCompositionMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """NodeCompositionMode."""

    NONE = "None"
    ONLY_SEQUENTIAL = "OnlySequential"
    FULL = "Full"


class NodesValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """NodesValueType."""

    ALL = "All"
    CUSTOM = "Custom"


class Orientation(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Orientation."""

    HORIZONTAL = "Horizontal"
    VERTICAL = "Vertical"


class OutputMechanism(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """OutputMechanism."""

    UPLOAD = "Upload"
    MOUNT = "Mount"
    HDFS = "Hdfs"
    LINK = "Link"
    DIRECT = "Direct"


class ParameterType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ParameterType."""

    INT = "Int"
    DOUBLE = "Double"
    BOOL = "Bool"
    STRING = "String"
    UNDEFINED = "Undefined"


class ParameterValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ParameterValueType."""

    LITERAL = "Literal"
    GRAPH_PARAMETER_NAME = "GraphParameterName"
    CONCATENATE = "Concatenate"
    INPUT = "Input"
    DATA_PATH = "DataPath"
    DATA_SET_DEFINITION = "DataSetDefinition"


class PipelineDraftMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """PipelineDraftMode."""

    NONE = "None"
    NORMAL = "Normal"
    CUSTOM = "Custom"


class PipelineRunStatusCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """PipelineRunStatusCode."""

    NOT_STARTED = "NotStarted"
    RUNNING = "Running"
    FAILED = "Failed"
    FINISHED = "Finished"
    CANCELED = "Canceled"
    QUEUED = "Queued"
    CANCEL_REQUESTED = "CancelRequested"


class PipelineStatusCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """PipelineStatusCode."""

    NOT_STARTED = "NotStarted"
    IN_DRAFT = "InDraft"
    PREPARING = "Preparing"
    RUNNING = "Running"
    FAILED = "Failed"
    FINISHED = "Finished"
    CANCELED = "Canceled"
    THROTTLED = "Throttled"
    UNKNOWN = "Unknown"


class PipelineType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """PipelineType."""

    TRAINING_PIPELINE = "TrainingPipeline"
    REAL_TIME_INFERENCE_PIPELINE = "RealTimeInferencePipeline"
    BATCH_INFERENCE_PIPELINE = "BatchInferencePipeline"
    UNKNOWN = "Unknown"


class PortAction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """PortAction."""

    PROMOTE = "Promote"
    VIEW_IN_DATA_STORE = "ViewInDataStore"
    VISUALIZE = "Visualize"
    GET_SCHEMA = "GetSchema"
    CREATE_INFERENCE_GRAPH = "CreateInferenceGraph"
    REGISTER_MODEL = "RegisterModel"
    PROMOTE_AS_TABULAR = "PromoteAsTabular"


class PrimaryMetrics(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """PrimaryMetrics."""

    AUC_WEIGHTED = "AUCWeighted"
    ACCURACY = "Accuracy"
    NORM_MACRO_RECALL = "NormMacroRecall"
    AVERAGE_PRECISION_SCORE_WEIGHTED = "AveragePrecisionScoreWeighted"
    PRECISION_SCORE_WEIGHTED = "PrecisionScoreWeighted"
    SPEARMAN_CORRELATION = "SpearmanCorrelation"
    NORMALIZED_ROOT_MEAN_SQUARED_ERROR = "NormalizedRootMeanSquaredError"
    R2_SCORE = "R2Score"
    NORMALIZED_MEAN_ABSOLUTE_ERROR = "NormalizedMeanAbsoluteError"
    NORMALIZED_ROOT_MEAN_SQUARED_LOG_ERROR = "NormalizedRootMeanSquaredLogError"
    MEAN_AVERAGE_PRECISION = "MeanAveragePrecision"
    IOU = "Iou"


class ProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ProvisioningState."""

    UNKNOWN = "Unknown"
    UPDATING = "Updating"
    CREATING = "Creating"
    DELETING = "Deleting"
    ACCEPTED = "Accepted"
    SUCCEEDED = "Succeeded"
    FAILED = "Failed"
    CANCELED = "Canceled"


class RealTimeEndpointInternalStepCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """RealTimeEndpointInternalStepCode."""

    ABOUT_TO_DEPLOY = "AboutToDeploy"
    WAIT_AKS_COMPUTE_READY = "WaitAksComputeReady"
    REGISTER_MODELS = "RegisterModels"
    CREATE_SERVICE_FROM_MODELS = "CreateServiceFromModels"
    UPDATE_SERVICE_FROM_MODELS = "UpdateServiceFromModels"
    WAIT_SERVICE_CREATING = "WaitServiceCreating"
    FETCH_SERVICE_RELATED_INFO = "FetchServiceRelatedInfo"
    TEST_WITH_SAMPLE_DATA = "TestWithSampleData"
    ABOUT_TO_DELETE = "AboutToDelete"
    DELETE_DEPLOYMENT = "DeleteDeployment"
    DELETE_ASSET = "DeleteAsset"
    DELETE_IMAGE = "DeleteImage"
    DELETE_MODEL = "DeleteModel"
    DELETE_SERVICE_RECORD = "DeleteServiceRecord"


class RealTimeEndpointOpCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """RealTimeEndpointOpCode."""

    CREATE = "Create"
    UPDATE = "Update"
    DELETE = "Delete"


class RealTimeEndpointOpStatusCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """RealTimeEndpointOpStatusCode."""

    ONGOING = "Ongoing"
    SUCCEEDED = "Succeeded"
    FAILED = "Failed"
    SUCCEEDED_WITH_WARNING = "SucceededWithWarning"


class RecurrenceFrequency(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """RecurrenceFrequency."""

    MINUTE = "Minute"
    HOUR = "Hour"
    DAY = "Day"
    WEEK = "Week"
    MONTH = "Month"


class RunDisplayNameGenerationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """RunDisplayNameGenerationType."""

    AUTO_APPEND = "AutoAppend"
    USER_PROVIDED_MACRO = "UserProvidedMacro"


class RunSettingParameterType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """RunSettingParameterType."""

    UNDEFINED = "Undefined"
    INT = "Int"
    DOUBLE = "Double"
    BOOL = "Bool"
    STRING = "String"
    JSON_STRING = "JsonString"
    YAML_STRING = "YamlString"
    STRING_LIST = "StringList"


class RunSettingUIWidgetTypeEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """RunSettingUIWidgetTypeEnum."""

    DEFAULT = "Default"
    COMPUTE_SELECTION = "ComputeSelection"
    JSON_EDITOR = "JsonEditor"
    MODE = "Mode"
    SEARCH_SPACE_PARAMETER = "SearchSpaceParameter"
    SECTION_TOGGLE = "SectionToggle"
    YAML_EDITOR = "YamlEditor"
    ENABLE_RUNTIME_SWEEP = "EnableRuntimeSweep"
    DATA_STORE_SELECTION = "DataStoreSelection"
    CHECKBOX = "Checkbox"
    MULTIPLE_SELECTION = "MultipleSelection"
    HYPERPARAMETER_CONFIGURATION = "HyperparameterConfiguration"
    JSON_TEXT_BOX = "JsonTextBox"
    CONNECTION = "Connection"
    STATIC = "Static"


class RunStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """RunStatus."""

    NOT_STARTED = "NotStarted"
    UNAPPROVED = "Unapproved"
    PAUSING = "Pausing"
    PAUSED = "Paused"
    STARTING = "Starting"
    PREPARING = "Preparing"
    QUEUED = "Queued"
    RUNNING = "Running"
    FINALIZING = "Finalizing"
    CANCEL_REQUESTED = "CancelRequested"
    COMPLETED = "Completed"
    FAILED = "Failed"
    CANCELED = "Canceled"


class RuntimeStatusEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """RuntimeStatusEnum."""

    UNAVAILABLE = "Unavailable"
    FAILED = "Failed"
    NOT_EXIST = "NotExist"
    STARTING = "Starting"
    STOPPING = "Stopping"


class RuntimeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """RuntimeType."""

    MANAGED_ONLINE_ENDPOINT = "ManagedOnlineEndpoint"
    COMPUTE_INSTANCE = "ComputeInstance"
    TRAINING_SESSION = "TrainingSession"


class RunType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """RunType."""

    HTTP = "HTTP"
    SDK = "SDK"
    SCHEDULE = "Schedule"
    PORTAL = "Portal"


class SamplingAlgorithmType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """SamplingAlgorithmType."""

    RANDOM = "Random"
    GRID = "Grid"
    BAYESIAN = "Bayesian"


class ScheduleProvisioningStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ScheduleProvisioningStatus."""

    CREATING = "Creating"
    UPDATING = "Updating"
    DELETING = "Deleting"
    SUCCEEDED = "Succeeded"
    FAILED = "Failed"
    CANCELED = "Canceled"


class ScheduleStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ScheduleStatus."""

    ENABLED = "Enabled"
    DISABLED = "Disabled"


class ScheduleType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ScheduleType."""

    CRON = "Cron"
    RECURRENCE = "Recurrence"


class ScopeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ScopeType."""

    # GLOBAL_ENUM avoids shadowing the `global` keyword in generated code.
    GLOBAL_ENUM = "Global"
    TENANT = "Tenant"
    SUBSCRIPTION = "Subscription"
    RESOURCE_GROUP = "ResourceGroup"
    WORKSPACE = "Workspace"


class ScriptType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ScriptType."""

    PYTHON = "Python"
    NOTEBOOK = "Notebook"


class SeasonalityMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """SeasonalityMode."""

    AUTO = "Auto"
    CUSTOM = "Custom"


class Section(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Section."""

    GALLERY = "Gallery"
    TEMPLATE = "Template"


class SessionSetupModeEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """SessionSetupModeEnum."""

    CLIENT_WAIT = "ClientWait"
    SYSTEM_WAIT = "SystemWait"


class SetupFlowSessionAction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """SetupFlowSessionAction."""

    INSTALL = "Install"
    RESET = "Reset"
    UPDATE = "Update"
    DELETE = "Delete"


class SeverityLevel(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """SeverityLevel."""

    CRITICAL = "Critical"
    ERROR = "Error"
    WARNING = "Warning"
    INFO = "Info"


class ShortSeriesHandlingConfiguration(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ShortSeriesHandlingConfiguration."""

    AUTO = "Auto"
    PAD = "Pad"
    DROP = "Drop"


class StackMetaLearnerType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """StackMetaLearnerType."""

    NONE = "None"
    LOGISTIC_REGRESSION = "LogisticRegression"
    LOGISTIC_REGRESSION_CV = "LogisticRegressionCV"
    LIGHT_GBM_CLASSIFIER = "LightGBMClassifier"
    ELASTIC_NET = "ElasticNet"
    ELASTIC_NET_CV = "ElasticNetCV"
    LIGHT_GBM_REGRESSOR = "LightGBMRegressor"
    LINEAR_REGRESSION = "LinearRegression"


class StorageAuthType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """StorageAuthType."""

    MSI = "MSI"
    CONNECTION_STRING = "ConnectionString"
    SAS = "SAS"


class StoredProcedureParameterType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """StoredProcedureParameterType."""

    STRING = "String"
    INT = "Int"
    DECIMAL = "Decimal"
    GUID = "Guid"
    BOOLEAN = "Boolean"
    DATE = "Date"


class SuccessfulCommandReturnCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """SuccessfulCommandReturnCode."""

    ZERO = "Zero"
    ZERO_OR_GREATER = "ZeroOrGreater"


class TabularTrainingMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """TabularTrainingMode."""

    DISTRIBUTED = "Distributed"
    NON_DISTRIBUTED = "NonDistributed"
    AUTO = "Auto"


class TargetAggregationFunction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """TargetAggregationFunction."""

    SUM = "Sum"
    MAX = "Max"
    MIN = "Min"
    MEAN = "Mean"


class TargetLagsMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """TargetLagsMode."""

    AUTO = "Auto"
    CUSTOM = "Custom"


class TargetRollingWindowSizeMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """TargetRollingWindowSizeMode."""

    AUTO = "Auto"
    CUSTOM = "Custom"


class TaskCreationOptions(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """TaskCreationOptions."""

    NONE = "None"
    PREFER_FAIRNESS = "PreferFairness"
    LONG_RUNNING = "LongRunning"
    ATTACHED_TO_PARENT = "AttachedToParent"
    DENY_CHILD_ATTACH = "DenyChildAttach"
    HIDE_SCHEDULER = "HideScheduler"
    RUN_CONTINUATIONS_ASYNCHRONOUSLY = "RunContinuationsAsynchronously"


class TaskStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """TaskStatus."""

    CREATED = "Created"
    WAITING_FOR_ACTIVATION = "WaitingForActivation"
    WAITING_TO_RUN = "WaitingToRun"
    RUNNING = "Running"
    WAITING_FOR_CHILDREN_TO_COMPLETE = "WaitingForChildrenToComplete"
    RAN_TO_COMPLETION = "RanToCompletion"
    CANCELED = "Canceled"
    FAULTED = "Faulted"


class TaskStatusCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """TaskStatusCode."""

    NOT_STARTED = "NotStarted"
    QUEUED = "Queued"
    RUNNING = "Running"
    FAILED = "Failed"
    FINISHED = "Finished"
    CANCELED = "Canceled"
    PARTIALLY_EXECUTED = "PartiallyExecuted"
    BYPASSED = "Bypassed"


class TaskType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """TaskType."""

    CLASSIFICATION = "Classification"
    REGRESSION = "Regression"
    FORECASTING = "Forecasting"
    IMAGE_CLASSIFICATION = "ImageClassification"
    IMAGE_CLASSIFICATION_MULTILABEL = "ImageClassificationMultilabel"
    IMAGE_OBJECT_DETECTION = "ImageObjectDetection"
    IMAGE_INSTANCE_SEGMENTATION = "ImageInstanceSegmentation"
    TEXT_CLASSIFICATION = "TextClassification"
    TEXT_MULTI_LABELING = "TextMultiLabeling"
    TEXT_NER = "TextNER"
    TEXT_CLASSIFICATION_MULTILABEL = "TextClassificationMultilabel"


class ToolFuncCallScenario(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ToolFuncCallScenario."""

    GENERATED_BY = "generated_by"
    REVERSE_GENERATED_BY = "reverse_generated_by"
    DYNAMIC_LIST = "dynamic_list"


class ToolState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ToolState."""

    STABLE = "Stable"
    PREVIEW = "Preview"
    DEPRECATED = "Deprecated"


class ToolType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ToolType."""

    LLM = "llm"
    PYTHON = "python"
    ACTION = "action"
    PROMPT = "prompt"
    CUSTOM_LLM = "custom_llm"
    CSHARP = "csharp"


class TrainingOutputType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """TrainingOutputType."""

    METRICS = "Metrics"
    MODEL = "Model"


class TriggerOperationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """TriggerOperationType."""

    CREATE = "Create"
    UPDATE = "Update"
    DELETE = "Delete"
    CREATE_OR_UPDATE = "CreateOrUpdate"


class TriggerType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """TriggerType."""

    RECURRENCE = "Recurrence"
    CRON = "Cron"


class UIInputDataDeliveryMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """UIInputDataDeliveryMode."""

    # NOTE: these wire values intentionally contain spaces/hyphens.
    READ_ONLY_MOUNT = "Read-only mount"
    READ_WRITE_MOUNT = "Read-write mount"
    DOWNLOAD = "Download"
    DIRECT = "Direct"
    EVALUATE_MOUNT = "Evaluate mount"
    EVALUATE_DOWNLOAD = "Evaluate download"
    HDFS = "Hdfs"


class UIScriptLanguageEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """UIScriptLanguageEnum."""

    NONE = "None"
    PYTHON = "Python"
    R = "R"
    JSON = "Json"
    SQL = "Sql"


class UIWidgetTypeEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """UIWidgetTypeEnum."""

    DEFAULT = "Default"
    MODE = "Mode"
    COLUMN_PICKER = "ColumnPicker"
    CREDENTIAL = "Credential"
    SCRIPT = "Script"
    COMPUTE_SELECTION = "ComputeSelection"
    JSON_EDITOR = "JsonEditor"
    SEARCH_SPACE_PARAMETER = "SearchSpaceParameter"
    SECTION_TOGGLE = "SectionToggle"
    YAML_EDITOR = "YamlEditor"
    ENABLE_RUNTIME_SWEEP = "EnableRuntimeSweep"
    DATA_STORE_SELECTION = "DataStoreSelection"
    INSTANCE_TYPE_SELECTION = "InstanceTypeSelection"
    CONNECTION_SELECTION = "ConnectionSelection"
    PROMPT_FLOW_CONNECTION_SELECTION = "PromptFlowConnectionSelection"
    AZURE_OPEN_AI_DEPLOYMENT_NAME_SELECTION = "AzureOpenAIDeploymentNameSelection"


class UploadState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """UploadState."""

    UPLOADING = "Uploading"
    COMPLETED = "Completed"
    CANCELED = "Canceled"
    FAILED = "Failed"


class UserType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """UserType."""

    USER = "User"
    APPLICATION = "Application"
    MANAGED_IDENTITY = "ManagedIdentity"
    KEY = "Key"


class UseStl(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """UseStl."""

    SEASON = "Season"
    SEASON_TREND = "SeasonTrend"


class ValidationStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ValidationStatus."""

    SUCCEEDED = "Succeeded"
    FAILED = "Failed"


class ValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ValueType."""

    INT = "int"
    DOUBLE = "double"
    BOOL = "bool"
    STRING = "string"
    SECRET = "secret"
    PROMPT_TEMPLATE = "prompt_template"
    OBJECT = "object"
    LIST = "list"
    BING_CONNECTION = "BingConnection"
    OPEN_AI_CONNECTION = "OpenAIConnection"
    AZURE_OPEN_AI_CONNECTION = "AzureOpenAIConnection"
    AZURE_CONTENT_MODERATOR_CONNECTION = "AzureContentModeratorConnection"
    CUSTOM_CONNECTION = "CustomConnection"
    AZURE_CONTENT_SAFETY_CONNECTION = "AzureContentSafetyConnection"
    SERP_CONNECTION = "SerpConnection"
    COGNITIVE_SEARCH_CONNECTION = "CognitiveSearchConnection"
    SUBSTRATE_LLM_CONNECTION = "SubstrateLLMConnection"
    PINECONE_CONNECTION = "PineconeConnection"
    QDRANT_CONNECTION = "QdrantConnection"
    WEAVIATE_CONNECTION = "WeaviateConnection"
    FUNCTION_LIST = "function_list"
    FUNCTION_STR = "function_str"
    FORM_RECOGNIZER_CONNECTION = "FormRecognizerConnection"
    FILE_PATH = "file_path"
    IMAGE = "image"


class VmPriority(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """VmPriority."""

    DEDICATED = "Dedicated"
    LOWPRIORITY = "Lowpriority"


class WebServiceState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """WebServiceState."""

    TRANSITIONING = "Transitioning"
    HEALTHY = "Healthy"
    UNHEALTHY = "Unhealthy"
    FAILED = "Failed"
    UNSCHEDULABLE = "Unschedulable"


class Weekday(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Weekday."""

    MONDAY = "Monday"
    TUESDAY = "Tuesday"
    WEDNESDAY = "Wednesday"
    THURSDAY = "Thursday"
    FRIDAY = "Friday"
    SATURDAY = "Saturday"
    SUNDAY = "Sunday"


class WeekDays(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """WeekDays."""

    MONDAY = "Monday"
    TUESDAY = "Tuesday"
    WEDNESDAY = "Wednesday"
    THURSDAY = "Thursday"
    FRIDAY = "Friday"
    SATURDAY = "Saturday"
    SUNDAY = "Sunday"


class YarnDeployMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """YarnDeployMode."""

    NONE = "None"
    CLIENT = "Client"
    CLUSTER = "Cluster"
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/models/_models.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from azure.core.exceptions import HttpResponseError import msrest.serialization class ACIAdvanceSettings(msrest.serialization.Model): """ACIAdvanceSettings. :ivar container_resource_requirements: :vartype container_resource_requirements: ~flow.models.ContainerResourceRequirements :ivar app_insights_enabled: :vartype app_insights_enabled: bool :ivar ssl_enabled: :vartype ssl_enabled: bool :ivar ssl_certificate: :vartype ssl_certificate: str :ivar ssl_key: :vartype ssl_key: str :ivar c_name: :vartype c_name: str :ivar dns_name_label: :vartype dns_name_label: str """ _attribute_map = { 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'}, 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, 'ssl_enabled': {'key': 'sslEnabled', 'type': 'bool'}, 'ssl_certificate': {'key': 'sslCertificate', 'type': 'str'}, 'ssl_key': {'key': 'sslKey', 'type': 'str'}, 'c_name': {'key': 'cName', 'type': 'str'}, 'dns_name_label': {'key': 'dnsNameLabel', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword container_resource_requirements: :paramtype container_resource_requirements: ~flow.models.ContainerResourceRequirements :keyword app_insights_enabled: :paramtype app_insights_enabled: bool :keyword ssl_enabled: :paramtype ssl_enabled: bool :keyword ssl_certificate: :paramtype ssl_certificate: str :keyword ssl_key: :paramtype ssl_key: str :keyword c_name: :paramtype c_name: str :keyword dns_name_label: :paramtype dns_name_label: str """ super(ACIAdvanceSettings, self).__init__(**kwargs) self.container_resource_requirements = 
kwargs.get('container_resource_requirements', None) self.app_insights_enabled = kwargs.get('app_insights_enabled', None) self.ssl_enabled = kwargs.get('ssl_enabled', None) self.ssl_certificate = kwargs.get('ssl_certificate', None) self.ssl_key = kwargs.get('ssl_key', None) self.c_name = kwargs.get('c_name', None) self.dns_name_label = kwargs.get('dns_name_label', None) class Activate(msrest.serialization.Model): """Activate. :ivar when: :vartype when: str :ivar is_property: Anything. :vartype is_property: any """ _attribute_map = { 'when': {'key': 'when', 'type': 'str'}, 'is_property': {'key': 'is', 'type': 'object'}, } def __init__( self, **kwargs ): """ :keyword when: :paramtype when: str :keyword is_property: Anything. :paramtype is_property: any """ super(Activate, self).__init__(**kwargs) self.when = kwargs.get('when', None) self.is_property = kwargs.get('is_property', None) class AdditionalErrorInfo(msrest.serialization.Model): """AdditionalErrorInfo. :ivar type: :vartype type: str :ivar info: Anything. :vartype info: any """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'info': {'key': 'info', 'type': 'object'}, } def __init__( self, **kwargs ): """ :keyword type: :paramtype type: str :keyword info: Anything. :paramtype info: any """ super(AdditionalErrorInfo, self).__init__(**kwargs) self.type = kwargs.get('type', None) self.info = kwargs.get('info', None) class AdhocTriggerScheduledCommandJobRequest(msrest.serialization.Model): """AdhocTriggerScheduledCommandJobRequest. 
:ivar job_name: :vartype job_name: str :ivar job_display_name: :vartype job_display_name: str :ivar trigger_time_string: :vartype trigger_time_string: str """ _attribute_map = { 'job_name': {'key': 'jobName', 'type': 'str'}, 'job_display_name': {'key': 'jobDisplayName', 'type': 'str'}, 'trigger_time_string': {'key': 'triggerTimeString', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword job_name: :paramtype job_name: str :keyword job_display_name: :paramtype job_display_name: str :keyword trigger_time_string: :paramtype trigger_time_string: str """ super(AdhocTriggerScheduledCommandJobRequest, self).__init__(**kwargs) self.job_name = kwargs.get('job_name', None) self.job_display_name = kwargs.get('job_display_name', None) self.trigger_time_string = kwargs.get('trigger_time_string', None) class AdhocTriggerScheduledSparkJobRequest(msrest.serialization.Model): """AdhocTriggerScheduledSparkJobRequest. :ivar job_name: :vartype job_name: str :ivar job_display_name: :vartype job_display_name: str :ivar trigger_time_string: :vartype trigger_time_string: str """ _attribute_map = { 'job_name': {'key': 'jobName', 'type': 'str'}, 'job_display_name': {'key': 'jobDisplayName', 'type': 'str'}, 'trigger_time_string': {'key': 'triggerTimeString', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword job_name: :paramtype job_name: str :keyword job_display_name: :paramtype job_display_name: str :keyword trigger_time_string: :paramtype trigger_time_string: str """ super(AdhocTriggerScheduledSparkJobRequest, self).__init__(**kwargs) self.job_name = kwargs.get('job_name', None) self.job_display_name = kwargs.get('job_display_name', None) self.trigger_time_string = kwargs.get('trigger_time_string', None) class AetherAmlDataset(msrest.serialization.Model): """AetherAmlDataset. 
:ivar registered_data_set_reference: :vartype registered_data_set_reference: ~flow.models.AetherRegisteredDataSetReference :ivar saved_data_set_reference: :vartype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference :ivar additional_transformations: :vartype additional_transformations: str """ _attribute_map = { 'registered_data_set_reference': {'key': 'registeredDataSetReference', 'type': 'AetherRegisteredDataSetReference'}, 'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'AetherSavedDataSetReference'}, 'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword registered_data_set_reference: :paramtype registered_data_set_reference: ~flow.models.AetherRegisteredDataSetReference :keyword saved_data_set_reference: :paramtype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference :keyword additional_transformations: :paramtype additional_transformations: str """ super(AetherAmlDataset, self).__init__(**kwargs) self.registered_data_set_reference = kwargs.get('registered_data_set_reference', None) self.saved_data_set_reference = kwargs.get('saved_data_set_reference', None) self.additional_transformations = kwargs.get('additional_transformations', None) class AetherAmlSparkCloudSetting(msrest.serialization.Model): """AetherAmlSparkCloudSetting. :ivar entry: :vartype entry: ~flow.models.AetherEntrySetting :ivar files: :vartype files: list[str] :ivar archives: :vartype archives: list[str] :ivar jars: :vartype jars: list[str] :ivar py_files: :vartype py_files: list[str] :ivar driver_memory: :vartype driver_memory: str :ivar driver_cores: :vartype driver_cores: int :ivar executor_memory: :vartype executor_memory: str :ivar executor_cores: :vartype executor_cores: int :ivar number_executors: :vartype number_executors: int :ivar environment_asset_id: :vartype environment_asset_id: str :ivar environment_variables: Dictionary of :code:`<string>`. 
:vartype environment_variables: dict[str, str] :ivar inline_environment_definition_string: :vartype inline_environment_definition_string: str :ivar conf: Dictionary of :code:`<string>`. :vartype conf: dict[str, str] :ivar compute: :vartype compute: str :ivar resources: :vartype resources: ~flow.models.AetherResourcesSetting :ivar identity: :vartype identity: ~flow.models.AetherIdentitySetting """ _attribute_map = { 'entry': {'key': 'entry', 'type': 'AetherEntrySetting'}, 'files': {'key': 'files', 'type': '[str]'}, 'archives': {'key': 'archives', 'type': '[str]'}, 'jars': {'key': 'jars', 'type': '[str]'}, 'py_files': {'key': 'pyFiles', 'type': '[str]'}, 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, 'driver_cores': {'key': 'driverCores', 'type': 'int'}, 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, 'executor_cores': {'key': 'executorCores', 'type': 'int'}, 'number_executors': {'key': 'numberExecutors', 'type': 'int'}, 'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'}, 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, 'inline_environment_definition_string': {'key': 'inlineEnvironmentDefinitionString', 'type': 'str'}, 'conf': {'key': 'conf', 'type': '{str}'}, 'compute': {'key': 'compute', 'type': 'str'}, 'resources': {'key': 'resources', 'type': 'AetherResourcesSetting'}, 'identity': {'key': 'identity', 'type': 'AetherIdentitySetting'}, } def __init__( self, **kwargs ): """ :keyword entry: :paramtype entry: ~flow.models.AetherEntrySetting :keyword files: :paramtype files: list[str] :keyword archives: :paramtype archives: list[str] :keyword jars: :paramtype jars: list[str] :keyword py_files: :paramtype py_files: list[str] :keyword driver_memory: :paramtype driver_memory: str :keyword driver_cores: :paramtype driver_cores: int :keyword executor_memory: :paramtype executor_memory: str :keyword executor_cores: :paramtype executor_cores: int :keyword number_executors: :paramtype number_executors: int 
:keyword environment_asset_id: :paramtype environment_asset_id: str :keyword environment_variables: Dictionary of :code:`<string>`. :paramtype environment_variables: dict[str, str] :keyword inline_environment_definition_string: :paramtype inline_environment_definition_string: str :keyword conf: Dictionary of :code:`<string>`. :paramtype conf: dict[str, str] :keyword compute: :paramtype compute: str :keyword resources: :paramtype resources: ~flow.models.AetherResourcesSetting :keyword identity: :paramtype identity: ~flow.models.AetherIdentitySetting """ super(AetherAmlSparkCloudSetting, self).__init__(**kwargs) self.entry = kwargs.get('entry', None) self.files = kwargs.get('files', None) self.archives = kwargs.get('archives', None) self.jars = kwargs.get('jars', None) self.py_files = kwargs.get('py_files', None) self.driver_memory = kwargs.get('driver_memory', None) self.driver_cores = kwargs.get('driver_cores', None) self.executor_memory = kwargs.get('executor_memory', None) self.executor_cores = kwargs.get('executor_cores', None) self.number_executors = kwargs.get('number_executors', None) self.environment_asset_id = kwargs.get('environment_asset_id', None) self.environment_variables = kwargs.get('environment_variables', None) self.inline_environment_definition_string = kwargs.get('inline_environment_definition_string', None) self.conf = kwargs.get('conf', None) self.compute = kwargs.get('compute', None) self.resources = kwargs.get('resources', None) self.identity = kwargs.get('identity', None) class AetherAPCloudConfiguration(msrest.serialization.Model): """AetherAPCloudConfiguration. 
:ivar referenced_ap_module_guid: :vartype referenced_ap_module_guid: str :ivar user_alias: :vartype user_alias: str :ivar aether_module_type: :vartype aether_module_type: str """ _attribute_map = { 'referenced_ap_module_guid': {'key': 'referencedAPModuleGuid', 'type': 'str'}, 'user_alias': {'key': 'userAlias', 'type': 'str'}, 'aether_module_type': {'key': 'aetherModuleType', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword referenced_ap_module_guid: :paramtype referenced_ap_module_guid: str :keyword user_alias: :paramtype user_alias: str :keyword aether_module_type: :paramtype aether_module_type: str """ super(AetherAPCloudConfiguration, self).__init__(**kwargs) self.referenced_ap_module_guid = kwargs.get('referenced_ap_module_guid', None) self.user_alias = kwargs.get('user_alias', None) self.aether_module_type = kwargs.get('aether_module_type', None) class AetherArgumentAssignment(msrest.serialization.Model): """AetherArgumentAssignment. :ivar value_type: Possible values include: "Literal", "Parameter", "Input", "Output", "NestedList", "StringInterpolationList". :vartype value_type: str or ~flow.models.AetherArgumentValueType :ivar value: :vartype value: str :ivar nested_argument_list: :vartype nested_argument_list: list[~flow.models.AetherArgumentAssignment] :ivar string_interpolation_argument_list: :vartype string_interpolation_argument_list: list[~flow.models.AetherArgumentAssignment] """ _attribute_map = { 'value_type': {'key': 'valueType', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, 'nested_argument_list': {'key': 'nestedArgumentList', 'type': '[AetherArgumentAssignment]'}, 'string_interpolation_argument_list': {'key': 'stringInterpolationArgumentList', 'type': '[AetherArgumentAssignment]'}, } def __init__( self, **kwargs ): """ :keyword value_type: Possible values include: "Literal", "Parameter", "Input", "Output", "NestedList", "StringInterpolationList". 
:paramtype value_type: str or ~flow.models.AetherArgumentValueType :keyword value: :paramtype value: str :keyword nested_argument_list: :paramtype nested_argument_list: list[~flow.models.AetherArgumentAssignment] :keyword string_interpolation_argument_list: :paramtype string_interpolation_argument_list: list[~flow.models.AetherArgumentAssignment] """ super(AetherArgumentAssignment, self).__init__(**kwargs) self.value_type = kwargs.get('value_type', None) self.value = kwargs.get('value', None) self.nested_argument_list = kwargs.get('nested_argument_list', None) self.string_interpolation_argument_list = kwargs.get('string_interpolation_argument_list', None) class AetherAssetDefinition(msrest.serialization.Model): """AetherAssetDefinition. :ivar path: :vartype path: str :ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel", "OpenAIModel". :vartype type: str or ~flow.models.AetherAssetType :ivar asset_id: :vartype asset_id: str :ivar initial_asset_id: :vartype initial_asset_id: str :ivar serialized_asset_id: :vartype serialized_asset_id: str """ _attribute_map = { 'path': {'key': 'path', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'asset_id': {'key': 'assetId', 'type': 'str'}, 'initial_asset_id': {'key': 'initialAssetId', 'type': 'str'}, 'serialized_asset_id': {'key': 'serializedAssetId', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword path: :paramtype path: str :keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel", "OpenAIModel". 
:paramtype type: str or ~flow.models.AetherAssetType :keyword asset_id: :paramtype asset_id: str :keyword initial_asset_id: :paramtype initial_asset_id: str :keyword serialized_asset_id: :paramtype serialized_asset_id: str """ super(AetherAssetDefinition, self).__init__(**kwargs) self.path = kwargs.get('path', None) self.type = kwargs.get('type', None) self.asset_id = kwargs.get('asset_id', None) self.initial_asset_id = kwargs.get('initial_asset_id', None) self.serialized_asset_id = kwargs.get('serialized_asset_id', None) class AetherAssetOutputSettings(msrest.serialization.Model): """AetherAssetOutputSettings. :ivar path: :vartype path: str :ivar path_parameter_assignment: :vartype path_parameter_assignment: ~flow.models.AetherParameterAssignment :ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel", "OpenAIModel". :vartype type: str or ~flow.models.AetherAssetType :ivar options: This is a dictionary. :vartype options: dict[str, str] :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". :vartype data_store_mode: str or ~flow.models.AetherDataStoreMode :ivar name: :vartype name: str :ivar version: :vartype version: str """ _attribute_map = { 'path': {'key': 'path', 'type': 'str'}, 'path_parameter_assignment': {'key': 'PathParameterAssignment', 'type': 'AetherParameterAssignment'}, 'type': {'key': 'type', 'type': 'str'}, 'options': {'key': 'options', 'type': '{str}'}, 'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword path: :paramtype path: str :keyword path_parameter_assignment: :paramtype path_parameter_assignment: ~flow.models.AetherParameterAssignment :keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel", "OpenAIModel". 
:paramtype type: str or ~flow.models.AetherAssetType :keyword options: This is a dictionary. :paramtype options: dict[str, str] :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". :paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode :keyword name: :paramtype name: str :keyword version: :paramtype version: str """ super(AetherAssetOutputSettings, self).__init__(**kwargs) self.path = kwargs.get('path', None) self.path_parameter_assignment = kwargs.get('path_parameter_assignment', None) self.type = kwargs.get('type', None) self.options = kwargs.get('options', None) self.data_store_mode = kwargs.get('data_store_mode', None) self.name = kwargs.get('name', None) self.version = kwargs.get('version', None) class AetherAutoFeaturizeConfiguration(msrest.serialization.Model): """AetherAutoFeaturizeConfiguration. :ivar featurization_config: :vartype featurization_config: ~flow.models.AetherFeaturizationSettings """ _attribute_map = { 'featurization_config': {'key': 'featurizationConfig', 'type': 'AetherFeaturizationSettings'}, } def __init__( self, **kwargs ): """ :keyword featurization_config: :paramtype featurization_config: ~flow.models.AetherFeaturizationSettings """ super(AetherAutoFeaturizeConfiguration, self).__init__(**kwargs) self.featurization_config = kwargs.get('featurization_config', None) class AetherAutoMLComponentConfiguration(msrest.serialization.Model): """AetherAutoMLComponentConfiguration. 
:ivar auto_train_config: :vartype auto_train_config: ~flow.models.AetherAutoTrainConfiguration :ivar auto_featurize_config: :vartype auto_featurize_config: ~flow.models.AetherAutoFeaturizeConfiguration """ _attribute_map = { 'auto_train_config': {'key': 'autoTrainConfig', 'type': 'AetherAutoTrainConfiguration'}, 'auto_featurize_config': {'key': 'autoFeaturizeConfig', 'type': 'AetherAutoFeaturizeConfiguration'}, } def __init__( self, **kwargs ): """ :keyword auto_train_config: :paramtype auto_train_config: ~flow.models.AetherAutoTrainConfiguration :keyword auto_featurize_config: :paramtype auto_featurize_config: ~flow.models.AetherAutoFeaturizeConfiguration """ super(AetherAutoMLComponentConfiguration, self).__init__(**kwargs) self.auto_train_config = kwargs.get('auto_train_config', None) self.auto_featurize_config = kwargs.get('auto_featurize_config', None) class AetherAutoTrainConfiguration(msrest.serialization.Model): """AetherAutoTrainConfiguration. :ivar general_settings: :vartype general_settings: ~flow.models.AetherGeneralSettings :ivar limit_settings: :vartype limit_settings: ~flow.models.AetherLimitSettings :ivar data_settings: :vartype data_settings: ~flow.models.AetherDataSettings :ivar forecasting_settings: :vartype forecasting_settings: ~flow.models.AetherForecastingSettings :ivar training_settings: :vartype training_settings: ~flow.models.AetherTrainingSettings :ivar sweep_settings: :vartype sweep_settings: ~flow.models.AetherSweepSettings :ivar image_model_settings: Dictionary of :code:`<any>`. :vartype image_model_settings: dict[str, any] :ivar properties: Dictionary of :code:`<string>`. :vartype properties: dict[str, str] :ivar compute_configuration: :vartype compute_configuration: ~flow.models.AetherComputeConfiguration :ivar resource_configurtion: :vartype resource_configurtion: ~flow.models.AetherResourceConfiguration :ivar environment_id: :vartype environment_id: str :ivar environment_variables: Dictionary of :code:`<string>`. 
:vartype environment_variables: dict[str, str] """ _attribute_map = { 'general_settings': {'key': 'generalSettings', 'type': 'AetherGeneralSettings'}, 'limit_settings': {'key': 'limitSettings', 'type': 'AetherLimitSettings'}, 'data_settings': {'key': 'dataSettings', 'type': 'AetherDataSettings'}, 'forecasting_settings': {'key': 'forecastingSettings', 'type': 'AetherForecastingSettings'}, 'training_settings': {'key': 'trainingSettings', 'type': 'AetherTrainingSettings'}, 'sweep_settings': {'key': 'sweepSettings', 'type': 'AetherSweepSettings'}, 'image_model_settings': {'key': 'imageModelSettings', 'type': '{object}'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'compute_configuration': {'key': 'computeConfiguration', 'type': 'AetherComputeConfiguration'}, 'resource_configurtion': {'key': 'resourceConfigurtion', 'type': 'AetherResourceConfiguration'}, 'environment_id': {'key': 'environmentId', 'type': 'str'}, 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, } def __init__( self, **kwargs ): """ :keyword general_settings: :paramtype general_settings: ~flow.models.AetherGeneralSettings :keyword limit_settings: :paramtype limit_settings: ~flow.models.AetherLimitSettings :keyword data_settings: :paramtype data_settings: ~flow.models.AetherDataSettings :keyword forecasting_settings: :paramtype forecasting_settings: ~flow.models.AetherForecastingSettings :keyword training_settings: :paramtype training_settings: ~flow.models.AetherTrainingSettings :keyword sweep_settings: :paramtype sweep_settings: ~flow.models.AetherSweepSettings :keyword image_model_settings: Dictionary of :code:`<any>`. :paramtype image_model_settings: dict[str, any] :keyword properties: Dictionary of :code:`<string>`. 
:paramtype properties: dict[str, str] :keyword compute_configuration: :paramtype compute_configuration: ~flow.models.AetherComputeConfiguration :keyword resource_configurtion: :paramtype resource_configurtion: ~flow.models.AetherResourceConfiguration :keyword environment_id: :paramtype environment_id: str :keyword environment_variables: Dictionary of :code:`<string>`. :paramtype environment_variables: dict[str, str] """ super(AetherAutoTrainConfiguration, self).__init__(**kwargs) self.general_settings = kwargs.get('general_settings', None) self.limit_settings = kwargs.get('limit_settings', None) self.data_settings = kwargs.get('data_settings', None) self.forecasting_settings = kwargs.get('forecasting_settings', None) self.training_settings = kwargs.get('training_settings', None) self.sweep_settings = kwargs.get('sweep_settings', None) self.image_model_settings = kwargs.get('image_model_settings', None) self.properties = kwargs.get('properties', None) self.compute_configuration = kwargs.get('compute_configuration', None) self.resource_configurtion = kwargs.get('resource_configurtion', None) self.environment_id = kwargs.get('environment_id', None) self.environment_variables = kwargs.get('environment_variables', None) class AetherAzureBlobReference(msrest.serialization.Model): """AetherAzureBlobReference. :ivar container: :vartype container: str :ivar sas_token: :vartype sas_token: str :ivar uri: :vartype uri: str :ivar account: :vartype account: str :ivar relative_path: :vartype relative_path: str :ivar path_type: Possible values include: "Unknown", "File", "Folder". 
:vartype path_type: str or ~flow.models.AetherFileBasedPathType :ivar aml_data_store_name: :vartype aml_data_store_name: str """ _attribute_map = { 'container': {'key': 'container', 'type': 'str'}, 'sas_token': {'key': 'sasToken', 'type': 'str'}, 'uri': {'key': 'uri', 'type': 'str'}, 'account': {'key': 'account', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, 'path_type': {'key': 'pathType', 'type': 'str'}, 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword container: :paramtype container: str :keyword sas_token: :paramtype sas_token: str :keyword uri: :paramtype uri: str :keyword account: :paramtype account: str :keyword relative_path: :paramtype relative_path: str :keyword path_type: Possible values include: "Unknown", "File", "Folder". :paramtype path_type: str or ~flow.models.AetherFileBasedPathType :keyword aml_data_store_name: :paramtype aml_data_store_name: str """ super(AetherAzureBlobReference, self).__init__(**kwargs) self.container = kwargs.get('container', None) self.sas_token = kwargs.get('sas_token', None) self.uri = kwargs.get('uri', None) self.account = kwargs.get('account', None) self.relative_path = kwargs.get('relative_path', None) self.path_type = kwargs.get('path_type', None) self.aml_data_store_name = kwargs.get('aml_data_store_name', None) class AetherAzureDatabaseReference(msrest.serialization.Model): """AetherAzureDatabaseReference. 
:ivar server_uri: :vartype server_uri: str :ivar database_name: :vartype database_name: str :ivar table_name: :vartype table_name: str :ivar sql_query: :vartype sql_query: str :ivar stored_procedure_name: :vartype stored_procedure_name: str :ivar stored_procedure_parameters: :vartype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter] :ivar aml_data_store_name: :vartype aml_data_store_name: str """ _attribute_map = { 'server_uri': {'key': 'serverUri', 'type': 'str'}, 'database_name': {'key': 'databaseName', 'type': 'str'}, 'table_name': {'key': 'tableName', 'type': 'str'}, 'sql_query': {'key': 'sqlQuery', 'type': 'str'}, 'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[AetherStoredProcedureParameter]'}, 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword server_uri: :paramtype server_uri: str :keyword database_name: :paramtype database_name: str :keyword table_name: :paramtype table_name: str :keyword sql_query: :paramtype sql_query: str :keyword stored_procedure_name: :paramtype stored_procedure_name: str :keyword stored_procedure_parameters: :paramtype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter] :keyword aml_data_store_name: :paramtype aml_data_store_name: str """ super(AetherAzureDatabaseReference, self).__init__(**kwargs) self.server_uri = kwargs.get('server_uri', None) self.database_name = kwargs.get('database_name', None) self.table_name = kwargs.get('table_name', None) self.sql_query = kwargs.get('sql_query', None) self.stored_procedure_name = kwargs.get('stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.aml_data_store_name = kwargs.get('aml_data_store_name', None) class AetherAzureDataLakeGen2Reference(msrest.serialization.Model): """AetherAzureDataLakeGen2Reference. 
:ivar file_system_name: :vartype file_system_name: str :ivar uri: :vartype uri: str :ivar account: :vartype account: str :ivar relative_path: :vartype relative_path: str :ivar path_type: Possible values include: "Unknown", "File", "Folder". :vartype path_type: str or ~flow.models.AetherFileBasedPathType :ivar aml_data_store_name: :vartype aml_data_store_name: str """ _attribute_map = { 'file_system_name': {'key': 'fileSystemName', 'type': 'str'}, 'uri': {'key': 'uri', 'type': 'str'}, 'account': {'key': 'account', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, 'path_type': {'key': 'pathType', 'type': 'str'}, 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword file_system_name: :paramtype file_system_name: str :keyword uri: :paramtype uri: str :keyword account: :paramtype account: str :keyword relative_path: :paramtype relative_path: str :keyword path_type: Possible values include: "Unknown", "File", "Folder". :paramtype path_type: str or ~flow.models.AetherFileBasedPathType :keyword aml_data_store_name: :paramtype aml_data_store_name: str """ super(AetherAzureDataLakeGen2Reference, self).__init__(**kwargs) self.file_system_name = kwargs.get('file_system_name', None) self.uri = kwargs.get('uri', None) self.account = kwargs.get('account', None) self.relative_path = kwargs.get('relative_path', None) self.path_type = kwargs.get('path_type', None) self.aml_data_store_name = kwargs.get('aml_data_store_name', None) class AetherAzureDataLakeReference(msrest.serialization.Model): """AetherAzureDataLakeReference. :ivar tenant: :vartype tenant: str :ivar subscription: :vartype subscription: str :ivar resource_group: :vartype resource_group: str :ivar data_lake_uri: :vartype data_lake_uri: str :ivar uri: :vartype uri: str :ivar account: :vartype account: str :ivar relative_path: :vartype relative_path: str :ivar path_type: Possible values include: "Unknown", "File", "Folder". 
:vartype path_type: str or ~flow.models.AetherFileBasedPathType :ivar aml_data_store_name: :vartype aml_data_store_name: str """ _attribute_map = { 'tenant': {'key': 'tenant', 'type': 'str'}, 'subscription': {'key': 'subscription', 'type': 'str'}, 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, 'data_lake_uri': {'key': 'dataLakeUri', 'type': 'str'}, 'uri': {'key': 'uri', 'type': 'str'}, 'account': {'key': 'account', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, 'path_type': {'key': 'pathType', 'type': 'str'}, 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword tenant: :paramtype tenant: str :keyword subscription: :paramtype subscription: str :keyword resource_group: :paramtype resource_group: str :keyword data_lake_uri: :paramtype data_lake_uri: str :keyword uri: :paramtype uri: str :keyword account: :paramtype account: str :keyword relative_path: :paramtype relative_path: str :keyword path_type: Possible values include: "Unknown", "File", "Folder". :paramtype path_type: str or ~flow.models.AetherFileBasedPathType :keyword aml_data_store_name: :paramtype aml_data_store_name: str """ super(AetherAzureDataLakeReference, self).__init__(**kwargs) self.tenant = kwargs.get('tenant', None) self.subscription = kwargs.get('subscription', None) self.resource_group = kwargs.get('resource_group', None) self.data_lake_uri = kwargs.get('data_lake_uri', None) self.uri = kwargs.get('uri', None) self.account = kwargs.get('account', None) self.relative_path = kwargs.get('relative_path', None) self.path_type = kwargs.get('path_type', None) self.aml_data_store_name = kwargs.get('aml_data_store_name', None) class AetherAzureFilesReference(msrest.serialization.Model): """AetherAzureFilesReference. 
:ivar share: :vartype share: str :ivar uri: :vartype uri: str :ivar account: :vartype account: str :ivar relative_path: :vartype relative_path: str :ivar path_type: Possible values include: "Unknown", "File", "Folder". :vartype path_type: str or ~flow.models.AetherFileBasedPathType :ivar aml_data_store_name: :vartype aml_data_store_name: str """ _attribute_map = { 'share': {'key': 'share', 'type': 'str'}, 'uri': {'key': 'uri', 'type': 'str'}, 'account': {'key': 'account', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, 'path_type': {'key': 'pathType', 'type': 'str'}, 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword share: :paramtype share: str :keyword uri: :paramtype uri: str :keyword account: :paramtype account: str :keyword relative_path: :paramtype relative_path: str :keyword path_type: Possible values include: "Unknown", "File", "Folder". :paramtype path_type: str or ~flow.models.AetherFileBasedPathType :keyword aml_data_store_name: :paramtype aml_data_store_name: str """ super(AetherAzureFilesReference, self).__init__(**kwargs) self.share = kwargs.get('share', None) self.uri = kwargs.get('uri', None) self.account = kwargs.get('account', None) self.relative_path = kwargs.get('relative_path', None) self.path_type = kwargs.get('path_type', None) self.aml_data_store_name = kwargs.get('aml_data_store_name', None) class AetherBatchAiComputeInfo(msrest.serialization.Model): """AetherBatchAiComputeInfo. 
:ivar batch_ai_subscription_id: :vartype batch_ai_subscription_id: str :ivar batch_ai_resource_group: :vartype batch_ai_resource_group: str :ivar batch_ai_workspace_name: :vartype batch_ai_workspace_name: str :ivar cluster_name: :vartype cluster_name: str :ivar native_shared_directory: :vartype native_shared_directory: str """ _attribute_map = { 'batch_ai_subscription_id': {'key': 'batchAiSubscriptionId', 'type': 'str'}, 'batch_ai_resource_group': {'key': 'batchAiResourceGroup', 'type': 'str'}, 'batch_ai_workspace_name': {'key': 'batchAiWorkspaceName', 'type': 'str'}, 'cluster_name': {'key': 'clusterName', 'type': 'str'}, 'native_shared_directory': {'key': 'nativeSharedDirectory', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword batch_ai_subscription_id: :paramtype batch_ai_subscription_id: str :keyword batch_ai_resource_group: :paramtype batch_ai_resource_group: str :keyword batch_ai_workspace_name: :paramtype batch_ai_workspace_name: str :keyword cluster_name: :paramtype cluster_name: str :keyword native_shared_directory: :paramtype native_shared_directory: str """ super(AetherBatchAiComputeInfo, self).__init__(**kwargs) self.batch_ai_subscription_id = kwargs.get('batch_ai_subscription_id', None) self.batch_ai_resource_group = kwargs.get('batch_ai_resource_group', None) self.batch_ai_workspace_name = kwargs.get('batch_ai_workspace_name', None) self.cluster_name = kwargs.get('cluster_name', None) self.native_shared_directory = kwargs.get('native_shared_directory', None) class AetherBuildArtifactInfo(msrest.serialization.Model): """AetherBuildArtifactInfo. :ivar type: Possible values include: "CloudBuild", "Vso", "VsoGit". 
:vartype type: str or ~flow.models.AetherBuildSourceType :ivar cloud_build_drop_path_info: :vartype cloud_build_drop_path_info: ~flow.models.AetherCloudBuildDropPathInfo :ivar vso_build_artifact_info: :vartype vso_build_artifact_info: ~flow.models.AetherVsoBuildArtifactInfo """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'cloud_build_drop_path_info': {'key': 'cloudBuildDropPathInfo', 'type': 'AetherCloudBuildDropPathInfo'}, 'vso_build_artifact_info': {'key': 'vsoBuildArtifactInfo', 'type': 'AetherVsoBuildArtifactInfo'}, } def __init__( self, **kwargs ): """ :keyword type: Possible values include: "CloudBuild", "Vso", "VsoGit". :paramtype type: str or ~flow.models.AetherBuildSourceType :keyword cloud_build_drop_path_info: :paramtype cloud_build_drop_path_info: ~flow.models.AetherCloudBuildDropPathInfo :keyword vso_build_artifact_info: :paramtype vso_build_artifact_info: ~flow.models.AetherVsoBuildArtifactInfo """ super(AetherBuildArtifactInfo, self).__init__(**kwargs) self.type = kwargs.get('type', None) self.cloud_build_drop_path_info = kwargs.get('cloud_build_drop_path_info', None) self.vso_build_artifact_info = kwargs.get('vso_build_artifact_info', None) class AetherCloudBuildDropPathInfo(msrest.serialization.Model): """AetherCloudBuildDropPathInfo. :ivar build_info: :vartype build_info: ~flow.models.AetherCloudBuildInfo :ivar root: :vartype root: str """ _attribute_map = { 'build_info': {'key': 'buildInfo', 'type': 'AetherCloudBuildInfo'}, 'root': {'key': 'root', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword build_info: :paramtype build_info: ~flow.models.AetherCloudBuildInfo :keyword root: :paramtype root: str """ super(AetherCloudBuildDropPathInfo, self).__init__(**kwargs) self.build_info = kwargs.get('build_info', None) self.root = kwargs.get('root', None) class AetherCloudBuildInfo(msrest.serialization.Model): """AetherCloudBuildInfo. 
    :ivar queue_info:
    :vartype queue_info: ~flow.models.AetherCloudBuildQueueInfo
    :ivar build_id:
    :vartype build_id: str
    :ivar drop_url:
    :vartype drop_url: str
    """

    _attribute_map = {
        'queue_info': {'key': 'queueInfo', 'type': 'AetherCloudBuildQueueInfo'},
        'build_id': {'key': 'buildId', 'type': 'str'},
        'drop_url': {'key': 'dropUrl', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword queue_info:
        :paramtype queue_info: ~flow.models.AetherCloudBuildQueueInfo
        :keyword build_id:
        :paramtype build_id: str
        :keyword drop_url:
        :paramtype drop_url: str
        """
        super(AetherCloudBuildInfo, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.queue_info = kwargs.get('queue_info', None)
        self.build_id = kwargs.get('build_id', None)
        self.drop_url = kwargs.get('drop_url', None)


# Auto-generated msrest data model with two optional string fields;
# do not hand-edit logic (regenerated by AutoRest).
class AetherCloudBuildQueueInfo(msrest.serialization.Model):
    """AetherCloudBuildQueueInfo.

    :ivar build_queue:
    :vartype build_queue: str
    :ivar build_role:
    :vartype build_role: str
    """

    _attribute_map = {
        'build_queue': {'key': 'buildQueue', 'type': 'str'},
        'build_role': {'key': 'buildRole', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword build_queue:
        :paramtype build_queue: str
        :keyword build_role:
        :paramtype build_role: str
        """
        super(AetherCloudBuildQueueInfo, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.build_queue = kwargs.get('build_queue', None)
        self.build_role = kwargs.get('build_role', None)


class AetherCloudPrioritySetting(msrest.serialization.Model):
    """AetherCloudPrioritySetting.
:ivar scope_priority: :vartype scope_priority: ~flow.models.AetherPriorityConfiguration :ivar aml_compute_priority: :vartype aml_compute_priority: ~flow.models.AetherPriorityConfiguration :ivar itp_priority: :vartype itp_priority: ~flow.models.AetherPriorityConfiguration :ivar singularity_priority: :vartype singularity_priority: ~flow.models.AetherPriorityConfiguration """ _attribute_map = { 'scope_priority': {'key': 'scopePriority', 'type': 'AetherPriorityConfiguration'}, 'aml_compute_priority': {'key': 'AmlComputePriority', 'type': 'AetherPriorityConfiguration'}, 'itp_priority': {'key': 'ItpPriority', 'type': 'AetherPriorityConfiguration'}, 'singularity_priority': {'key': 'SingularityPriority', 'type': 'AetherPriorityConfiguration'}, } def __init__( self, **kwargs ): """ :keyword scope_priority: :paramtype scope_priority: ~flow.models.AetherPriorityConfiguration :keyword aml_compute_priority: :paramtype aml_compute_priority: ~flow.models.AetherPriorityConfiguration :keyword itp_priority: :paramtype itp_priority: ~flow.models.AetherPriorityConfiguration :keyword singularity_priority: :paramtype singularity_priority: ~flow.models.AetherPriorityConfiguration """ super(AetherCloudPrioritySetting, self).__init__(**kwargs) self.scope_priority = kwargs.get('scope_priority', None) self.aml_compute_priority = kwargs.get('aml_compute_priority', None) self.itp_priority = kwargs.get('itp_priority', None) self.singularity_priority = kwargs.get('singularity_priority', None) class AetherCloudSettings(msrest.serialization.Model): """AetherCloudSettings. 
:ivar linked_settings: :vartype linked_settings: list[~flow.models.AetherParameterAssignment] :ivar priority_config: :vartype priority_config: ~flow.models.AetherPriorityConfiguration :ivar hdi_run_config: :vartype hdi_run_config: ~flow.models.AetherHdiRunConfiguration :ivar sub_graph_config: :vartype sub_graph_config: ~flow.models.AetherSubGraphConfiguration :ivar auto_ml_component_config: :vartype auto_ml_component_config: ~flow.models.AetherAutoMLComponentConfiguration :ivar ap_cloud_config: :vartype ap_cloud_config: ~flow.models.AetherAPCloudConfiguration :ivar scope_cloud_config: :vartype scope_cloud_config: ~flow.models.AetherScopeCloudConfiguration :ivar es_cloud_config: :vartype es_cloud_config: ~flow.models.AetherEsCloudConfiguration :ivar data_transfer_cloud_config: :vartype data_transfer_cloud_config: ~flow.models.AetherDataTransferCloudConfiguration :ivar aml_spark_cloud_setting: :vartype aml_spark_cloud_setting: ~flow.models.AetherAmlSparkCloudSetting :ivar data_transfer_v2_cloud_setting: :vartype data_transfer_v2_cloud_setting: ~flow.models.AetherDataTransferV2CloudSetting """ _attribute_map = { 'linked_settings': {'key': 'linkedSettings', 'type': '[AetherParameterAssignment]'}, 'priority_config': {'key': 'priorityConfig', 'type': 'AetherPriorityConfiguration'}, 'hdi_run_config': {'key': 'hdiRunConfig', 'type': 'AetherHdiRunConfiguration'}, 'sub_graph_config': {'key': 'subGraphConfig', 'type': 'AetherSubGraphConfiguration'}, 'auto_ml_component_config': {'key': 'autoMLComponentConfig', 'type': 'AetherAutoMLComponentConfiguration'}, 'ap_cloud_config': {'key': 'apCloudConfig', 'type': 'AetherAPCloudConfiguration'}, 'scope_cloud_config': {'key': 'scopeCloudConfig', 'type': 'AetherScopeCloudConfiguration'}, 'es_cloud_config': {'key': 'esCloudConfig', 'type': 'AetherEsCloudConfiguration'}, 'data_transfer_cloud_config': {'key': 'dataTransferCloudConfig', 'type': 'AetherDataTransferCloudConfiguration'}, 'aml_spark_cloud_setting': {'key': 
'amlSparkCloudSetting', 'type': 'AetherAmlSparkCloudSetting'}, 'data_transfer_v2_cloud_setting': {'key': 'dataTransferV2CloudSetting', 'type': 'AetherDataTransferV2CloudSetting'}, } def __init__( self, **kwargs ): """ :keyword linked_settings: :paramtype linked_settings: list[~flow.models.AetherParameterAssignment] :keyword priority_config: :paramtype priority_config: ~flow.models.AetherPriorityConfiguration :keyword hdi_run_config: :paramtype hdi_run_config: ~flow.models.AetherHdiRunConfiguration :keyword sub_graph_config: :paramtype sub_graph_config: ~flow.models.AetherSubGraphConfiguration :keyword auto_ml_component_config: :paramtype auto_ml_component_config: ~flow.models.AetherAutoMLComponentConfiguration :keyword ap_cloud_config: :paramtype ap_cloud_config: ~flow.models.AetherAPCloudConfiguration :keyword scope_cloud_config: :paramtype scope_cloud_config: ~flow.models.AetherScopeCloudConfiguration :keyword es_cloud_config: :paramtype es_cloud_config: ~flow.models.AetherEsCloudConfiguration :keyword data_transfer_cloud_config: :paramtype data_transfer_cloud_config: ~flow.models.AetherDataTransferCloudConfiguration :keyword aml_spark_cloud_setting: :paramtype aml_spark_cloud_setting: ~flow.models.AetherAmlSparkCloudSetting :keyword data_transfer_v2_cloud_setting: :paramtype data_transfer_v2_cloud_setting: ~flow.models.AetherDataTransferV2CloudSetting """ super(AetherCloudSettings, self).__init__(**kwargs) self.linked_settings = kwargs.get('linked_settings', None) self.priority_config = kwargs.get('priority_config', None) self.hdi_run_config = kwargs.get('hdi_run_config', None) self.sub_graph_config = kwargs.get('sub_graph_config', None) self.auto_ml_component_config = kwargs.get('auto_ml_component_config', None) self.ap_cloud_config = kwargs.get('ap_cloud_config', None) self.scope_cloud_config = kwargs.get('scope_cloud_config', None) self.es_cloud_config = kwargs.get('es_cloud_config', None) self.data_transfer_cloud_config = 
kwargs.get('data_transfer_cloud_config', None) self.aml_spark_cloud_setting = kwargs.get('aml_spark_cloud_setting', None) self.data_transfer_v2_cloud_setting = kwargs.get('data_transfer_v2_cloud_setting', None) class AetherColumnTransformer(msrest.serialization.Model): """AetherColumnTransformer. :ivar fields: :vartype fields: list[str] :ivar parameters: Anything. :vartype parameters: any """ _attribute_map = { 'fields': {'key': 'fields', 'type': '[str]'}, 'parameters': {'key': 'parameters', 'type': 'object'}, } def __init__( self, **kwargs ): """ :keyword fields: :paramtype fields: list[str] :keyword parameters: Anything. :paramtype parameters: any """ super(AetherColumnTransformer, self).__init__(**kwargs) self.fields = kwargs.get('fields', None) self.parameters = kwargs.get('parameters', None) class AetherComputeConfiguration(msrest.serialization.Model): """AetherComputeConfiguration. :ivar target: :vartype target: str :ivar instance_count: :vartype instance_count: int :ivar is_local: :vartype is_local: bool :ivar location: :vartype location: str :ivar is_clusterless: :vartype is_clusterless: bool :ivar instance_type: :vartype instance_type: str :ivar properties: Dictionary of :code:`<any>`. 
:vartype properties: dict[str, any] :ivar is_preemptable: :vartype is_preemptable: bool """ _attribute_map = { 'target': {'key': 'target', 'type': 'str'}, 'instance_count': {'key': 'instanceCount', 'type': 'int'}, 'is_local': {'key': 'isLocal', 'type': 'bool'}, 'location': {'key': 'location', 'type': 'str'}, 'is_clusterless': {'key': 'isClusterless', 'type': 'bool'}, 'instance_type': {'key': 'instanceType', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{object}'}, 'is_preemptable': {'key': 'isPreemptable', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword target: :paramtype target: str :keyword instance_count: :paramtype instance_count: int :keyword is_local: :paramtype is_local: bool :keyword location: :paramtype location: str :keyword is_clusterless: :paramtype is_clusterless: bool :keyword instance_type: :paramtype instance_type: str :keyword properties: Dictionary of :code:`<any>`. :paramtype properties: dict[str, any] :keyword is_preemptable: :paramtype is_preemptable: bool """ super(AetherComputeConfiguration, self).__init__(**kwargs) self.target = kwargs.get('target', None) self.instance_count = kwargs.get('instance_count', None) self.is_local = kwargs.get('is_local', None) self.location = kwargs.get('location', None) self.is_clusterless = kwargs.get('is_clusterless', None) self.instance_type = kwargs.get('instance_type', None) self.properties = kwargs.get('properties', None) self.is_preemptable = kwargs.get('is_preemptable', None) class AetherComputeSetting(msrest.serialization.Model): """AetherComputeSetting. :ivar name: :vartype name: str :ivar compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker", "Databricks", "Aisc". 
:vartype compute_type: str or ~flow.models.AetherComputeType :ivar batch_ai_compute_info: :vartype batch_ai_compute_info: ~flow.models.AetherBatchAiComputeInfo :ivar remote_docker_compute_info: :vartype remote_docker_compute_info: ~flow.models.AetherRemoteDockerComputeInfo :ivar hdi_cluster_compute_info: :vartype hdi_cluster_compute_info: ~flow.models.AetherHdiClusterComputeInfo :ivar mlc_compute_info: :vartype mlc_compute_info: ~flow.models.AetherMlcComputeInfo :ivar databricks_compute_info: :vartype databricks_compute_info: ~flow.models.AetherDatabricksComputeInfo """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'batch_ai_compute_info': {'key': 'batchAiComputeInfo', 'type': 'AetherBatchAiComputeInfo'}, 'remote_docker_compute_info': {'key': 'remoteDockerComputeInfo', 'type': 'AetherRemoteDockerComputeInfo'}, 'hdi_cluster_compute_info': {'key': 'hdiClusterComputeInfo', 'type': 'AetherHdiClusterComputeInfo'}, 'mlc_compute_info': {'key': 'mlcComputeInfo', 'type': 'AetherMlcComputeInfo'}, 'databricks_compute_info': {'key': 'databricksComputeInfo', 'type': 'AetherDatabricksComputeInfo'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker", "Databricks", "Aisc". 
:paramtype compute_type: str or ~flow.models.AetherComputeType :keyword batch_ai_compute_info: :paramtype batch_ai_compute_info: ~flow.models.AetherBatchAiComputeInfo :keyword remote_docker_compute_info: :paramtype remote_docker_compute_info: ~flow.models.AetherRemoteDockerComputeInfo :keyword hdi_cluster_compute_info: :paramtype hdi_cluster_compute_info: ~flow.models.AetherHdiClusterComputeInfo :keyword mlc_compute_info: :paramtype mlc_compute_info: ~flow.models.AetherMlcComputeInfo :keyword databricks_compute_info: :paramtype databricks_compute_info: ~flow.models.AetherDatabricksComputeInfo """ super(AetherComputeSetting, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.compute_type = kwargs.get('compute_type', None) self.batch_ai_compute_info = kwargs.get('batch_ai_compute_info', None) self.remote_docker_compute_info = kwargs.get('remote_docker_compute_info', None) self.hdi_cluster_compute_info = kwargs.get('hdi_cluster_compute_info', None) self.mlc_compute_info = kwargs.get('mlc_compute_info', None) self.databricks_compute_info = kwargs.get('databricks_compute_info', None) class AetherControlInput(msrest.serialization.Model): """AetherControlInput. :ivar name: :vartype name: str :ivar default_value: Possible values include: "None", "False", "True", "Skipped". :vartype default_value: str or ~flow.models.AetherControlInputValue """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'default_value': {'key': 'defaultValue', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword default_value: Possible values include: "None", "False", "True", "Skipped". :paramtype default_value: str or ~flow.models.AetherControlInputValue """ super(AetherControlInput, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.default_value = kwargs.get('default_value', None) class AetherControlOutput(msrest.serialization.Model): """AetherControlOutput. 
    :ivar name:
    :vartype name: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        """
        super(AetherControlOutput, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)


# Auto-generated msrest data model. NOTE: unlike the sibling models, the wire
# key 'DataCopyMode' is PascalCase in the service contract — keep as generated.
class AetherCopyDataTask(msrest.serialization.Model):
    """AetherCopyDataTask.

    :ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
    :vartype data_copy_mode: str or ~flow.models.AetherDataCopyMode
    """

    _attribute_map = {
        'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
        :paramtype data_copy_mode: str or ~flow.models.AetherDataCopyMode
        """
        super(AetherCopyDataTask, self).__init__(**kwargs)
        # Optional; absent kwarg defaults to None.
        self.data_copy_mode = kwargs.get('data_copy_mode', None)


# Auto-generated msrest data model with three optional string fields;
# do not hand-edit logic (regenerated by AutoRest).
class AetherCosmosReference(msrest.serialization.Model):
    """AetherCosmosReference.

    :ivar cluster:
    :vartype cluster: str
    :ivar vc:
    :vartype vc: str
    :ivar relative_path:
    :vartype relative_path: str
    """

    _attribute_map = {
        'cluster': {'key': 'cluster', 'type': 'str'},
        'vc': {'key': 'vc', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword cluster:
        :paramtype cluster: str
        :keyword vc:
        :paramtype vc: str
        :keyword relative_path:
        :paramtype relative_path: str
        """
        super(AetherCosmosReference, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.cluster = kwargs.get('cluster', None)
        self.vc = kwargs.get('vc', None)
        self.relative_path = kwargs.get('relative_path', None)


class AetherCreatedBy(msrest.serialization.Model):
    """AetherCreatedBy.
:ivar user_object_id: :vartype user_object_id: str :ivar user_tenant_id: :vartype user_tenant_id: str :ivar user_name: :vartype user_name: str :ivar puid: :vartype puid: str :ivar iss: :vartype iss: str :ivar idp: :vartype idp: str :ivar altsec_id: :vartype altsec_id: str :ivar source_ip: :vartype source_ip: str :ivar skip_registry_private_link_check: :vartype skip_registry_private_link_check: bool """ _attribute_map = { 'user_object_id': {'key': 'userObjectId', 'type': 'str'}, 'user_tenant_id': {'key': 'userTenantId', 'type': 'str'}, 'user_name': {'key': 'userName', 'type': 'str'}, 'puid': {'key': 'puid', 'type': 'str'}, 'iss': {'key': 'iss', 'type': 'str'}, 'idp': {'key': 'idp', 'type': 'str'}, 'altsec_id': {'key': 'altsecId', 'type': 'str'}, 'source_ip': {'key': 'sourceIp', 'type': 'str'}, 'skip_registry_private_link_check': {'key': 'skipRegistryPrivateLinkCheck', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword user_object_id: :paramtype user_object_id: str :keyword user_tenant_id: :paramtype user_tenant_id: str :keyword user_name: :paramtype user_name: str :keyword puid: :paramtype puid: str :keyword iss: :paramtype iss: str :keyword idp: :paramtype idp: str :keyword altsec_id: :paramtype altsec_id: str :keyword source_ip: :paramtype source_ip: str :keyword skip_registry_private_link_check: :paramtype skip_registry_private_link_check: bool """ super(AetherCreatedBy, self).__init__(**kwargs) self.user_object_id = kwargs.get('user_object_id', None) self.user_tenant_id = kwargs.get('user_tenant_id', None) self.user_name = kwargs.get('user_name', None) self.puid = kwargs.get('puid', None) self.iss = kwargs.get('iss', None) self.idp = kwargs.get('idp', None) self.altsec_id = kwargs.get('altsec_id', None) self.source_ip = kwargs.get('source_ip', None) self.skip_registry_private_link_check = kwargs.get('skip_registry_private_link_check', None) class AetherCustomReference(msrest.serialization.Model): """AetherCustomReference. 
    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    :ivar relative_path:
    :vartype relative_path: str
    """

    _attribute_map = {
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        :keyword relative_path:
        :paramtype relative_path: str
        """
        super(AetherCustomReference, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
        self.relative_path = kwargs.get('relative_path', None)


# Auto-generated msrest data model with optional 'connection' and 'table'
# string fields; do not hand-edit logic (regenerated by AutoRest).
class AetherDatabaseSink(msrest.serialization.Model):
    """AetherDatabaseSink.

    :ivar connection:
    :vartype connection: str
    :ivar table:
    :vartype table: str
    """

    _attribute_map = {
        'connection': {'key': 'connection', 'type': 'str'},
        'table': {'key': 'table', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword connection:
        :paramtype connection: str
        :keyword table:
        :paramtype table: str
        """
        super(AetherDatabaseSink, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.connection = kwargs.get('connection', None)
        self.table = kwargs.get('table', None)


class AetherDatabaseSource(msrest.serialization.Model):
    """AetherDatabaseSource.
:ivar connection: :vartype connection: str :ivar query: :vartype query: str :ivar stored_procedure_name: :vartype stored_procedure_name: str :ivar stored_procedure_parameters: :vartype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter] """ _attribute_map = { 'connection': {'key': 'connection', 'type': 'str'}, 'query': {'key': 'query', 'type': 'str'}, 'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[AetherStoredProcedureParameter]'}, } def __init__( self, **kwargs ): """ :keyword connection: :paramtype connection: str :keyword query: :paramtype query: str :keyword stored_procedure_name: :paramtype stored_procedure_name: str :keyword stored_procedure_parameters: :paramtype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter] """ super(AetherDatabaseSource, self).__init__(**kwargs) self.connection = kwargs.get('connection', None) self.query = kwargs.get('query', None) self.stored_procedure_name = kwargs.get('stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) class AetherDatabricksComputeInfo(msrest.serialization.Model): """AetherDatabricksComputeInfo. :ivar existing_cluster_id: :vartype existing_cluster_id: str """ _attribute_map = { 'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword existing_cluster_id: :paramtype existing_cluster_id: str """ super(AetherDatabricksComputeInfo, self).__init__(**kwargs) self.existing_cluster_id = kwargs.get('existing_cluster_id', None) class AetherDataLocation(msrest.serialization.Model): """AetherDataLocation. :ivar storage_type: Possible values include: "Cosmos", "AzureBlob", "Artifact", "Snapshot", "SavedAmlDataset", "Asset". 
:vartype storage_type: str or ~flow.models.AetherDataLocationStorageType :ivar storage_id: :vartype storage_id: str :ivar uri: :vartype uri: str :ivar data_store_name: :vartype data_store_name: str :ivar data_reference: :vartype data_reference: ~flow.models.AetherDataReference :ivar aml_dataset: :vartype aml_dataset: ~flow.models.AetherAmlDataset :ivar asset_definition: :vartype asset_definition: ~flow.models.AetherAssetDefinition :ivar is_compliant: :vartype is_compliant: bool :ivar reuse_calculation_fields: :vartype reuse_calculation_fields: ~flow.models.AetherDataLocationReuseCalculationFields """ _attribute_map = { 'storage_type': {'key': 'storageType', 'type': 'str'}, 'storage_id': {'key': 'storageId', 'type': 'str'}, 'uri': {'key': 'uri', 'type': 'str'}, 'data_store_name': {'key': 'dataStoreName', 'type': 'str'}, 'data_reference': {'key': 'dataReference', 'type': 'AetherDataReference'}, 'aml_dataset': {'key': 'amlDataset', 'type': 'AetherAmlDataset'}, 'asset_definition': {'key': 'assetDefinition', 'type': 'AetherAssetDefinition'}, 'is_compliant': {'key': 'isCompliant', 'type': 'bool'}, 'reuse_calculation_fields': {'key': 'reuseCalculationFields', 'type': 'AetherDataLocationReuseCalculationFields'}, } def __init__( self, **kwargs ): """ :keyword storage_type: Possible values include: "Cosmos", "AzureBlob", "Artifact", "Snapshot", "SavedAmlDataset", "Asset". 
:paramtype storage_type: str or ~flow.models.AetherDataLocationStorageType :keyword storage_id: :paramtype storage_id: str :keyword uri: :paramtype uri: str :keyword data_store_name: :paramtype data_store_name: str :keyword data_reference: :paramtype data_reference: ~flow.models.AetherDataReference :keyword aml_dataset: :paramtype aml_dataset: ~flow.models.AetherAmlDataset :keyword asset_definition: :paramtype asset_definition: ~flow.models.AetherAssetDefinition :keyword is_compliant: :paramtype is_compliant: bool :keyword reuse_calculation_fields: :paramtype reuse_calculation_fields: ~flow.models.AetherDataLocationReuseCalculationFields """ super(AetherDataLocation, self).__init__(**kwargs) self.storage_type = kwargs.get('storage_type', None) self.storage_id = kwargs.get('storage_id', None) self.uri = kwargs.get('uri', None) self.data_store_name = kwargs.get('data_store_name', None) self.data_reference = kwargs.get('data_reference', None) self.aml_dataset = kwargs.get('aml_dataset', None) self.asset_definition = kwargs.get('asset_definition', None) self.is_compliant = kwargs.get('is_compliant', None) self.reuse_calculation_fields = kwargs.get('reuse_calculation_fields', None) class AetherDataLocationReuseCalculationFields(msrest.serialization.Model): """AetherDataLocationReuseCalculationFields. 
    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar data_experiment_id:
    :vartype data_experiment_id: str
    """

    _attribute_map = {
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'data_experiment_id': {'key': 'dataExperimentId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword data_experiment_id:
        :paramtype data_experiment_id: str
        """
        super(AetherDataLocationReuseCalculationFields, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.data_store_name = kwargs.get('data_store_name', None)
        self.relative_path = kwargs.get('relative_path', None)
        self.data_experiment_id = kwargs.get('data_experiment_id', None)


# Auto-generated msrest data model: a datastore name plus relative path, with
# an optional nested AetherSqlDataPath; do not hand-edit (regenerated by AutoRest).
class AetherDataPath(msrest.serialization.Model):
    """AetherDataPath.

    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar sql_data_path:
    :vartype sql_data_path: ~flow.models.AetherSqlDataPath
    """

    _attribute_map = {
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'sql_data_path': {'key': 'sqlDataPath', 'type': 'AetherSqlDataPath'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword sql_data_path:
        :paramtype sql_data_path: ~flow.models.AetherSqlDataPath
        """
        super(AetherDataPath, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.data_store_name = kwargs.get('data_store_name', None)
        self.relative_path = kwargs.get('relative_path', None)
        self.sql_data_path = kwargs.get('sql_data_path', None)


class AetherDataReference(msrest.serialization.Model):
    """AetherDataReference.
:ivar type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles", "Cosmos", "PhillyHdfs", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase", "Custom", "Hdfs". :vartype type: str or ~flow.models.AetherDataReferenceType :ivar azure_blob_reference: :vartype azure_blob_reference: ~flow.models.AetherAzureBlobReference :ivar azure_data_lake_reference: :vartype azure_data_lake_reference: ~flow.models.AetherAzureDataLakeReference :ivar azure_files_reference: :vartype azure_files_reference: ~flow.models.AetherAzureFilesReference :ivar cosmos_reference: :vartype cosmos_reference: ~flow.models.AetherCosmosReference :ivar philly_hdfs_reference: :vartype philly_hdfs_reference: ~flow.models.AetherPhillyHdfsReference :ivar azure_sql_database_reference: :vartype azure_sql_database_reference: ~flow.models.AetherAzureDatabaseReference :ivar azure_postgres_database_reference: :vartype azure_postgres_database_reference: ~flow.models.AetherAzureDatabaseReference :ivar azure_data_lake_gen2_reference: :vartype azure_data_lake_gen2_reference: ~flow.models.AetherAzureDataLakeGen2Reference :ivar dbfs_reference: :vartype dbfs_reference: ~flow.models.AetherDBFSReference :ivar azure_my_sql_database_reference: :vartype azure_my_sql_database_reference: ~flow.models.AetherAzureDatabaseReference :ivar custom_reference: :vartype custom_reference: ~flow.models.AetherCustomReference :ivar hdfs_reference: :vartype hdfs_reference: ~flow.models.AetherHdfsReference """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'azure_blob_reference': {'key': 'azureBlobReference', 'type': 'AetherAzureBlobReference'}, 'azure_data_lake_reference': {'key': 'azureDataLakeReference', 'type': 'AetherAzureDataLakeReference'}, 'azure_files_reference': {'key': 'azureFilesReference', 'type': 'AetherAzureFilesReference'}, 'cosmos_reference': {'key': 'cosmosReference', 'type': 'AetherCosmosReference'}, 'philly_hdfs_reference': {'key': 
'phillyHdfsReference', 'type': 'AetherPhillyHdfsReference'}, 'azure_sql_database_reference': {'key': 'azureSqlDatabaseReference', 'type': 'AetherAzureDatabaseReference'}, 'azure_postgres_database_reference': {'key': 'azurePostgresDatabaseReference', 'type': 'AetherAzureDatabaseReference'}, 'azure_data_lake_gen2_reference': {'key': 'azureDataLakeGen2Reference', 'type': 'AetherAzureDataLakeGen2Reference'}, 'dbfs_reference': {'key': 'dbfsReference', 'type': 'AetherDBFSReference'}, 'azure_my_sql_database_reference': {'key': 'azureMySqlDatabaseReference', 'type': 'AetherAzureDatabaseReference'}, 'custom_reference': {'key': 'customReference', 'type': 'AetherCustomReference'}, 'hdfs_reference': {'key': 'hdfsReference', 'type': 'AetherHdfsReference'}, } def __init__( self, **kwargs ): """ :keyword type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles", "Cosmos", "PhillyHdfs", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase", "Custom", "Hdfs". 
:paramtype type: str or ~flow.models.AetherDataReferenceType :keyword azure_blob_reference: :paramtype azure_blob_reference: ~flow.models.AetherAzureBlobReference :keyword azure_data_lake_reference: :paramtype azure_data_lake_reference: ~flow.models.AetherAzureDataLakeReference :keyword azure_files_reference: :paramtype azure_files_reference: ~flow.models.AetherAzureFilesReference :keyword cosmos_reference: :paramtype cosmos_reference: ~flow.models.AetherCosmosReference :keyword philly_hdfs_reference: :paramtype philly_hdfs_reference: ~flow.models.AetherPhillyHdfsReference :keyword azure_sql_database_reference: :paramtype azure_sql_database_reference: ~flow.models.AetherAzureDatabaseReference :keyword azure_postgres_database_reference: :paramtype azure_postgres_database_reference: ~flow.models.AetherAzureDatabaseReference :keyword azure_data_lake_gen2_reference: :paramtype azure_data_lake_gen2_reference: ~flow.models.AetherAzureDataLakeGen2Reference :keyword dbfs_reference: :paramtype dbfs_reference: ~flow.models.AetherDBFSReference :keyword azure_my_sql_database_reference: :paramtype azure_my_sql_database_reference: ~flow.models.AetherAzureDatabaseReference :keyword custom_reference: :paramtype custom_reference: ~flow.models.AetherCustomReference :keyword hdfs_reference: :paramtype hdfs_reference: ~flow.models.AetherHdfsReference """ super(AetherDataReference, self).__init__(**kwargs) self.type = kwargs.get('type', None) self.azure_blob_reference = kwargs.get('azure_blob_reference', None) self.azure_data_lake_reference = kwargs.get('azure_data_lake_reference', None) self.azure_files_reference = kwargs.get('azure_files_reference', None) self.cosmos_reference = kwargs.get('cosmos_reference', None) self.philly_hdfs_reference = kwargs.get('philly_hdfs_reference', None) self.azure_sql_database_reference = kwargs.get('azure_sql_database_reference', None) self.azure_postgres_database_reference = kwargs.get('azure_postgres_database_reference', None) 
self.azure_data_lake_gen2_reference = kwargs.get('azure_data_lake_gen2_reference', None) self.dbfs_reference = kwargs.get('dbfs_reference', None) self.azure_my_sql_database_reference = kwargs.get('azure_my_sql_database_reference', None) self.custom_reference = kwargs.get('custom_reference', None) self.hdfs_reference = kwargs.get('hdfs_reference', None) class AetherDataSetDefinition(msrest.serialization.Model): """AetherDataSetDefinition. :ivar data_type_short_name: :vartype data_type_short_name: str :ivar parameter_name: :vartype parameter_name: str :ivar value: :vartype value: ~flow.models.AetherDataSetDefinitionValue """ _attribute_map = { 'data_type_short_name': {'key': 'dataTypeShortName', 'type': 'str'}, 'parameter_name': {'key': 'parameterName', 'type': 'str'}, 'value': {'key': 'value', 'type': 'AetherDataSetDefinitionValue'}, } def __init__( self, **kwargs ): """ :keyword data_type_short_name: :paramtype data_type_short_name: str :keyword parameter_name: :paramtype parameter_name: str :keyword value: :paramtype value: ~flow.models.AetherDataSetDefinitionValue """ super(AetherDataSetDefinition, self).__init__(**kwargs) self.data_type_short_name = kwargs.get('data_type_short_name', None) self.parameter_name = kwargs.get('parameter_name', None) self.value = kwargs.get('value', None) class AetherDataSetDefinitionValue(msrest.serialization.Model): """AetherDataSetDefinitionValue. 
:ivar literal_value: :vartype literal_value: ~flow.models.AetherDataPath :ivar data_set_reference: :vartype data_set_reference: ~flow.models.AetherRegisteredDataSetReference :ivar saved_data_set_reference: :vartype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference :ivar asset_definition: :vartype asset_definition: ~flow.models.AetherAssetDefinition """ _attribute_map = { 'literal_value': {'key': 'literalValue', 'type': 'AetherDataPath'}, 'data_set_reference': {'key': 'dataSetReference', 'type': 'AetherRegisteredDataSetReference'}, 'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'AetherSavedDataSetReference'}, 'asset_definition': {'key': 'assetDefinition', 'type': 'AetherAssetDefinition'}, } def __init__( self, **kwargs ): """ :keyword literal_value: :paramtype literal_value: ~flow.models.AetherDataPath :keyword data_set_reference: :paramtype data_set_reference: ~flow.models.AetherRegisteredDataSetReference :keyword saved_data_set_reference: :paramtype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference :keyword asset_definition: :paramtype asset_definition: ~flow.models.AetherAssetDefinition """ super(AetherDataSetDefinitionValue, self).__init__(**kwargs) self.literal_value = kwargs.get('literal_value', None) self.data_set_reference = kwargs.get('data_set_reference', None) self.saved_data_set_reference = kwargs.get('saved_data_set_reference', None) self.asset_definition = kwargs.get('asset_definition', None) class AetherDatasetOutput(msrest.serialization.Model): """AetherDatasetOutput. :ivar dataset_type: Possible values include: "File", "Tabular". 
    :vartype dataset_type: str or ~flow.models.AetherDatasetType
    :ivar dataset_registration:
    :vartype dataset_registration: ~flow.models.AetherDatasetRegistration
    :ivar dataset_output_options:
    :vartype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
    """

    # Maps Python attribute names to wire (JSON) keys and msrest type strings.
    _attribute_map = {
        'dataset_type': {'key': 'datasetType', 'type': 'str'},
        'dataset_registration': {'key': 'datasetRegistration', 'type': 'AetherDatasetRegistration'},
        'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'AetherDatasetOutputOptions'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword dataset_type: Possible values include: "File", "Tabular".
        :paramtype dataset_type: str or ~flow.models.AetherDatasetType
        :keyword dataset_registration:
        :paramtype dataset_registration: ~flow.models.AetherDatasetRegistration
        :keyword dataset_output_options:
        :paramtype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
        """
        super(AetherDatasetOutput, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.dataset_type = kwargs.get('dataset_type', None)
        self.dataset_registration = kwargs.get('dataset_registration', None)
        self.dataset_output_options = kwargs.get('dataset_output_options', None)


# AutoRest-generated msrest model: options controlling where and how a dataset
# output is written on its datastore.
class AetherDatasetOutputOptions(msrest.serialization.Model):
    """AetherDatasetOutputOptions.
    :ivar source_globs:
    :vartype source_globs: ~flow.models.AetherGlobsOptions
    :ivar path_on_datastore:
    :vartype path_on_datastore: str
    :ivar path_on_datastore_parameter_assignment:
    :vartype path_on_datastore_parameter_assignment: ~flow.models.AetherParameterAssignment
    """

    # Maps Python attribute names to wire (JSON) keys and msrest type strings.
    # NOTE(review): the last wire key is PascalCase ('PathOnDatastore...'), unlike
    # the camelCase keys above — generated from the service schema as-is; do not "fix".
    _attribute_map = {
        'source_globs': {'key': 'sourceGlobs', 'type': 'AetherGlobsOptions'},
        'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'},
        'path_on_datastore_parameter_assignment': {'key': 'PathOnDatastoreParameterAssignment', 'type': 'AetherParameterAssignment'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword source_globs:
        :paramtype source_globs: ~flow.models.AetherGlobsOptions
        :keyword path_on_datastore:
        :paramtype path_on_datastore: str
        :keyword path_on_datastore_parameter_assignment:
        :paramtype path_on_datastore_parameter_assignment: ~flow.models.AetherParameterAssignment
        """
        super(AetherDatasetOutputOptions, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.source_globs = kwargs.get('source_globs', None)
        self.path_on_datastore = kwargs.get('path_on_datastore', None)
        self.path_on_datastore_parameter_assignment = kwargs.get('path_on_datastore_parameter_assignment', None)


# AutoRest-generated msrest model: how a produced dataset should be registered
# (name, versioning, description, tags).
class AetherDatasetRegistration(msrest.serialization.Model):
    """AetherDatasetRegistration.

    :ivar name:
    :vartype name: str
    :ivar create_new_version:
    :vartype create_new_version: bool
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar additional_transformations:
    :vartype additional_transformations: str
    """

    # Maps Python attribute names to wire (JSON) keys and msrest type strings.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'create_new_version': {'key': 'createNewVersion', 'type': 'bool'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword create_new_version:
        :paramtype create_new_version: bool
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword additional_transformations:
        :paramtype additional_transformations: str
        """
        super(AetherDatasetRegistration, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.name = kwargs.get('name', None)
        self.create_new_version = kwargs.get('create_new_version', None)
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.additional_transformations = kwargs.get('additional_transformations', None)


# AutoRest-generated msrest model: target/weight column and validation/test
# data settings (used by AutoML-style configurations).
class AetherDataSettings(msrest.serialization.Model):
    """AetherDataSettings.
    :ivar target_column_name:
    :vartype target_column_name: str
    :ivar weight_column_name:
    :vartype weight_column_name: str
    :ivar positive_label:
    :vartype positive_label: str
    :ivar validation_data:
    :vartype validation_data: ~flow.models.AetherValidationDataSettings
    :ivar test_data:
    :vartype test_data: ~flow.models.AetherTestDataSettings
    """

    # Maps Python attribute names to wire (JSON) keys and msrest type strings.
    _attribute_map = {
        'target_column_name': {'key': 'targetColumnName', 'type': 'str'},
        'weight_column_name': {'key': 'weightColumnName', 'type': 'str'},
        'positive_label': {'key': 'positiveLabel', 'type': 'str'},
        'validation_data': {'key': 'validationData', 'type': 'AetherValidationDataSettings'},
        'test_data': {'key': 'testData', 'type': 'AetherTestDataSettings'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword target_column_name:
        :paramtype target_column_name: str
        :keyword weight_column_name:
        :paramtype weight_column_name: str
        :keyword positive_label:
        :paramtype positive_label: str
        :keyword validation_data:
        :paramtype validation_data: ~flow.models.AetherValidationDataSettings
        :keyword test_data:
        :paramtype test_data: ~flow.models.AetherTestDataSettings
        """
        super(AetherDataSettings, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.target_column_name = kwargs.get('target_column_name', None)
        self.weight_column_name = kwargs.get('weight_column_name', None)
        self.positive_label = kwargs.get('positive_label', None)
        self.validation_data = kwargs.get('validation_data', None)
        self.test_data = kwargs.get('test_data', None)


# AutoRest-generated msrest model: names the datastore a step reads/writes.
class AetherDatastoreSetting(msrest.serialization.Model):
    """AetherDatastoreSetting.

    :ivar data_store_name:
    :vartype data_store_name: str
    """

    _attribute_map = {
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_store_name:
        :paramtype data_store_name: str
        """
        super(AetherDatastoreSetting, self).__init__(**kwargs)
        self.data_store_name = kwargs.get('data_store_name', None)


# AutoRest-generated msrest model: cloud-side configuration of a data-transfer step.
class AetherDataTransferCloudConfiguration(msrest.serialization.Model):
    """AetherDataTransferCloudConfiguration.
    :ivar allow_overwrite:
    :vartype allow_overwrite: bool
    """

    # NOTE(review): wire key 'AllowOverwrite' is PascalCase in the service schema,
    # unlike most other models' camelCase keys — generated as-is.
    _attribute_map = {
        'allow_overwrite': {'key': 'AllowOverwrite', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword allow_overwrite:
        :paramtype allow_overwrite: bool
        """
        super(AetherDataTransferCloudConfiguration, self).__init__(**kwargs)
        self.allow_overwrite = kwargs.get('allow_overwrite', None)


# AutoRest-generated msrest model: destination of a data transfer; 'type'
# discriminates between the file-system and database alternatives below.
class AetherDataTransferSink(msrest.serialization.Model):
    """AetherDataTransferSink.

    :ivar type: Possible values include: "DataBase", "FileSystem".
    :vartype type: str or ~flow.models.AetherDataTransferStorageType
    :ivar file_system:
    :vartype file_system: ~flow.models.AetherFileSystem
    :ivar database_sink:
    :vartype database_sink: ~flow.models.AetherDatabaseSink
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'file_system': {'key': 'fileSystem', 'type': 'AetherFileSystem'},
        'database_sink': {'key': 'databaseSink', 'type': 'AetherDatabaseSink'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword type: Possible values include: "DataBase", "FileSystem".
        :paramtype type: str or ~flow.models.AetherDataTransferStorageType
        :keyword file_system:
        :paramtype file_system: ~flow.models.AetherFileSystem
        :keyword database_sink:
        :paramtype database_sink: ~flow.models.AetherDatabaseSink
        """
        super(AetherDataTransferSink, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.type = kwargs.get('type', None)
        self.file_system = kwargs.get('file_system', None)
        self.database_sink = kwargs.get('database_sink', None)


# AutoRest-generated msrest model: source of a data transfer; mirrors
# AetherDataTransferSink with a database *source* instead of a sink.
class AetherDataTransferSource(msrest.serialization.Model):
    """AetherDataTransferSource.

    :ivar type: Possible values include: "DataBase", "FileSystem".
    :vartype type: str or ~flow.models.AetherDataTransferStorageType
    :ivar file_system:
    :vartype file_system: ~flow.models.AetherFileSystem
    :ivar database_source:
    :vartype database_source: ~flow.models.AetherDatabaseSource
    """

    # Maps Python attribute names to wire (JSON) keys and msrest type strings.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'file_system': {'key': 'fileSystem', 'type': 'AetherFileSystem'},
        'database_source': {'key': 'databaseSource', 'type': 'AetherDatabaseSource'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword type: Possible values include: "DataBase", "FileSystem".
        :paramtype type: str or ~flow.models.AetherDataTransferStorageType
        :keyword file_system:
        :paramtype file_system: ~flow.models.AetherFileSystem
        :keyword database_source:
        :paramtype database_source: ~flow.models.AetherDatabaseSource
        """
        super(AetherDataTransferSource, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.type = kwargs.get('type', None)
        self.file_system = kwargs.get('file_system', None)
        self.database_source = kwargs.get('database_source', None)


# AutoRest-generated msrest model: cloud settings for a DataTransfer v2 task —
# one of import/export/copy, plus named source and sink dictionaries.
class AetherDataTransferV2CloudSetting(msrest.serialization.Model):
    """AetherDataTransferV2CloudSetting.

    :ivar task_type: Possible values include: "ImportData", "ExportData", "CopyData".
    :vartype task_type: str or ~flow.models.AetherDataTransferTaskType
    :ivar compute_name:
    :vartype compute_name: str
    :ivar copy_data_task:
    :vartype copy_data_task: ~flow.models.AetherCopyDataTask
    :ivar import_data_task:
    :vartype import_data_task: ~flow.models.AetherImportDataTask
    :ivar export_data_task:
    :vartype export_data_task: ~flow.models.AetherExportDataTask
    :ivar data_transfer_sources: This is a dictionary.
    :vartype data_transfer_sources: dict[str, ~flow.models.AetherDataTransferSource]
    :ivar data_transfer_sinks: This is a dictionary.
    :vartype data_transfer_sinks: dict[str, ~flow.models.AetherDataTransferSink]
    :ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
    :vartype data_copy_mode: str or ~flow.models.AetherDataCopyMode
    """

    # NOTE(review): 'taskType' is camelCase but the remaining wire keys are
    # PascalCase — this mirrors the service schema; do not normalize by hand.
    _attribute_map = {
        'task_type': {'key': 'taskType', 'type': 'str'},
        'compute_name': {'key': 'ComputeName', 'type': 'str'},
        'copy_data_task': {'key': 'CopyDataTask', 'type': 'AetherCopyDataTask'},
        'import_data_task': {'key': 'ImportDataTask', 'type': 'AetherImportDataTask'},
        'export_data_task': {'key': 'ExportDataTask', 'type': 'AetherExportDataTask'},
        'data_transfer_sources': {'key': 'DataTransferSources', 'type': '{AetherDataTransferSource}'},
        'data_transfer_sinks': {'key': 'DataTransferSinks', 'type': '{AetherDataTransferSink}'},
        'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword task_type: Possible values include: "ImportData", "ExportData", "CopyData".
        :paramtype task_type: str or ~flow.models.AetherDataTransferTaskType
        :keyword compute_name:
        :paramtype compute_name: str
        :keyword copy_data_task:
        :paramtype copy_data_task: ~flow.models.AetherCopyDataTask
        :keyword import_data_task:
        :paramtype import_data_task: ~flow.models.AetherImportDataTask
        :keyword export_data_task:
        :paramtype export_data_task: ~flow.models.AetherExportDataTask
        :keyword data_transfer_sources: This is a dictionary.
        :paramtype data_transfer_sources: dict[str, ~flow.models.AetherDataTransferSource]
        :keyword data_transfer_sinks: This is a dictionary.
        :paramtype data_transfer_sinks: dict[str, ~flow.models.AetherDataTransferSink]
        :keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
        :paramtype data_copy_mode: str or ~flow.models.AetherDataCopyMode
        """
        super(AetherDataTransferV2CloudSetting, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.task_type = kwargs.get('task_type', None)
        self.compute_name = kwargs.get('compute_name', None)
        self.copy_data_task = kwargs.get('copy_data_task', None)
        self.import_data_task = kwargs.get('import_data_task', None)
        self.export_data_task = kwargs.get('export_data_task', None)
        self.data_transfer_sources = kwargs.get('data_transfer_sources', None)
        self.data_transfer_sinks = kwargs.get('data_transfer_sinks', None)
        self.data_copy_mode = kwargs.get('data_copy_mode', None)


# AutoRest-generated msrest model: a relative path within a DBFS-backed AML datastore.
class AetherDBFSReference(msrest.serialization.Model):
    """AetherDBFSReference.

    :ivar relative_path:
    :vartype relative_path: str
    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    """

    _attribute_map = {
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        """
        super(AetherDBFSReference, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.relative_path = kwargs.get('relative_path', None)
        self.aml_data_store_name = kwargs.get('aml_data_store_name', None)


# AutoRest-generated msrest model: Docker options for a step's execution environment.
class AetherDockerSettingConfiguration(msrest.serialization.Model):
    """AetherDockerSettingConfiguration.
    :ivar use_docker:
    :vartype use_docker: bool
    :ivar shared_volumes:
    :vartype shared_volumes: bool
    :ivar shm_size:
    :vartype shm_size: str
    :ivar arguments:
    :vartype arguments: list[str]
    """

    # Maps Python attribute names to wire (JSON) keys and msrest type strings.
    _attribute_map = {
        'use_docker': {'key': 'useDocker', 'type': 'bool'},
        'shared_volumes': {'key': 'sharedVolumes', 'type': 'bool'},
        'shm_size': {'key': 'shmSize', 'type': 'str'},
        'arguments': {'key': 'arguments', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword use_docker:
        :paramtype use_docker: bool
        :keyword shared_volumes:
        :paramtype shared_volumes: bool
        :keyword shm_size:
        :paramtype shm_size: str
        :keyword arguments:
        :paramtype arguments: list[str]
        """
        super(AetherDockerSettingConfiguration, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.use_docker = kwargs.get('use_docker', None)
        self.shared_volumes = kwargs.get('shared_volumes', None)
        self.shm_size = kwargs.get('shm_size', None)
        self.arguments = kwargs.get('arguments', None)


# AutoRest-generated msrest model: port wiring and loop condition for a
# do-while control-flow node.
class AetherDoWhileControlFlowInfo(msrest.serialization.Model):
    """AetherDoWhileControlFlowInfo.

    :ivar output_port_name_to_input_port_names_mapping: Dictionary of
     <components·1f2aigm·schemas·aetherdowhilecontrolflowinfo·properties·outputportnametoinputportnamesmapping·additionalproperties>.
    :vartype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
    :ivar condition_output_port_name:
    :vartype condition_output_port_name: str
    :ivar run_settings:
    :vartype run_settings: ~flow.models.AetherDoWhileControlFlowRunSettings
    """

    # Maps Python attribute names to wire (JSON) keys and msrest type strings.
    _attribute_map = {
        'output_port_name_to_input_port_names_mapping': {'key': 'outputPortNameToInputPortNamesMapping', 'type': '{[str]}'},
        'condition_output_port_name': {'key': 'conditionOutputPortName', 'type': 'str'},
        'run_settings': {'key': 'runSettings', 'type': 'AetherDoWhileControlFlowRunSettings'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword output_port_name_to_input_port_names_mapping: Dictionary of
         <components·1f2aigm·schemas·aetherdowhilecontrolflowinfo·properties·outputportnametoinputportnamesmapping·additionalproperties>.
        :paramtype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
        :keyword condition_output_port_name:
        :paramtype condition_output_port_name: str
        :keyword run_settings:
        :paramtype run_settings: ~flow.models.AetherDoWhileControlFlowRunSettings
        """
        super(AetherDoWhileControlFlowInfo, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.output_port_name_to_input_port_names_mapping = kwargs.get('output_port_name_to_input_port_names_mapping', None)
        self.condition_output_port_name = kwargs.get('condition_output_port_name', None)
        self.run_settings = kwargs.get('run_settings', None)


# AutoRest-generated msrest model: run settings (iteration cap) for a do-while loop.
class AetherDoWhileControlFlowRunSettings(msrest.serialization.Model):
    """AetherDoWhileControlFlowRunSettings.
    :ivar max_loop_iteration_count:
    :vartype max_loop_iteration_count: ~flow.models.AetherParameterAssignment
    """

    _attribute_map = {
        'max_loop_iteration_count': {'key': 'maxLoopIterationCount', 'type': 'AetherParameterAssignment'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword max_loop_iteration_count:
        :paramtype max_loop_iteration_count: ~flow.models.AetherParameterAssignment
        """
        super(AetherDoWhileControlFlowRunSettings, self).__init__(**kwargs)
        self.max_loop_iteration_count = kwargs.get('max_loop_iteration_count', None)


# AutoRest-generated msrest model: free-text documentation keyed by input,
# output, and parameter name for an entity's interface.
class AetherEntityInterfaceDocumentation(msrest.serialization.Model):
    """AetherEntityInterfaceDocumentation.

    :ivar inputs_documentation: Dictionary of :code:`<string>`.
    :vartype inputs_documentation: dict[str, str]
    :ivar outputs_documentation: Dictionary of :code:`<string>`.
    :vartype outputs_documentation: dict[str, str]
    :ivar parameters_documentation: Dictionary of :code:`<string>`.
    :vartype parameters_documentation: dict[str, str]
    """

    _attribute_map = {
        'inputs_documentation': {'key': 'inputsDocumentation', 'type': '{str}'},
        'outputs_documentation': {'key': 'outputsDocumentation', 'type': '{str}'},
        'parameters_documentation': {'key': 'parametersDocumentation', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword inputs_documentation: Dictionary of :code:`<string>`.
        :paramtype inputs_documentation: dict[str, str]
        :keyword outputs_documentation: Dictionary of :code:`<string>`.
        :paramtype outputs_documentation: dict[str, str]
        :keyword parameters_documentation: Dictionary of :code:`<string>`.
        :paramtype parameters_documentation: dict[str, str]
        """
        super(AetherEntityInterfaceDocumentation, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.inputs_documentation = kwargs.get('inputs_documentation', None)
        self.outputs_documentation = kwargs.get('outputs_documentation', None)
        self.parameters_documentation = kwargs.get('parameters_documentation', None)


# AutoRest-generated msrest model: entry point (file and class name) of a scripted step.
class AetherEntrySetting(msrest.serialization.Model):
    """AetherEntrySetting.
    :ivar file:
    :vartype file: str
    :ivar class_name:
    :vartype class_name: str
    """

    _attribute_map = {
        'file': {'key': 'file', 'type': 'str'},
        'class_name': {'key': 'className', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword file:
        :paramtype file: str
        :keyword class_name:
        :paramtype class_name: str
        """
        super(AetherEntrySetting, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.file = kwargs.get('file', None)
        self.class_name = kwargs.get('class_name', None)


# AutoRest-generated msrest model: environment reference by name/version, or an
# inline definition when use_environment_definition is set.
class AetherEnvironmentConfiguration(msrest.serialization.Model):
    """AetherEnvironmentConfiguration.

    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    :ivar use_environment_definition:
    :vartype use_environment_definition: bool
    :ivar environment_definition_string:
    :vartype environment_definition_string: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'use_environment_definition': {'key': 'useEnvironmentDefinition', 'type': 'bool'},
        'environment_definition_string': {'key': 'environmentDefinitionString', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        :keyword use_environment_definition:
        :paramtype use_environment_definition: bool
        :keyword environment_definition_string:
        :paramtype environment_definition_string: str
        """
        super(AetherEnvironmentConfiguration, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.name = kwargs.get('name', None)
        self.version = kwargs.get('version', None)
        self.use_environment_definition = kwargs.get('use_environment_definition', None)
        self.environment_definition_string = kwargs.get('environment_definition_string', None)


# AutoRest-generated msrest model: execution-service cloud configuration for a
# step (priorities, environment, distributed/docker config, identity, endpoints).
class AetherEsCloudConfiguration(msrest.serialization.Model):
    """AetherEsCloudConfiguration.
    :ivar enable_output_to_file_based_on_data_type_id:
    :vartype enable_output_to_file_based_on_data_type_id: bool
    :ivar aml_compute_priority_internal:
    :vartype aml_compute_priority_internal: ~flow.models.AetherPriorityConfiguration
    :ivar itp_priority_internal:
    :vartype itp_priority_internal: ~flow.models.AetherPriorityConfiguration
    :ivar singularity_priority_internal:
    :vartype singularity_priority_internal: ~flow.models.AetherPriorityConfiguration
    :ivar environment:
    :vartype environment: ~flow.models.AetherEnvironmentConfiguration
    :ivar hyper_drive_configuration:
    :vartype hyper_drive_configuration: ~flow.models.AetherHyperDriveConfiguration
    :ivar k8_s_config:
    :vartype k8_s_config: ~flow.models.AetherK8SConfiguration
    :ivar resource_config:
    :vartype resource_config: ~flow.models.AetherResourceConfiguration
    :ivar torch_distributed_config:
    :vartype torch_distributed_config: ~flow.models.AetherTorchDistributedConfiguration
    :ivar target_selector_config:
    :vartype target_selector_config: ~flow.models.AetherTargetSelectorConfiguration
    :ivar docker_config:
    :vartype docker_config: ~flow.models.AetherDockerSettingConfiguration
    :ivar environment_variables: Dictionary of :code:`<string>`.
    :vartype environment_variables: dict[str, str]
    :ivar max_run_duration_seconds:
    :vartype max_run_duration_seconds: int
    :ivar identity:
    :vartype identity: ~flow.models.AetherIdentitySetting
    :ivar application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
    :vartype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
    :ivar run_config:
    :vartype run_config: str
    """

    # Maps Python attribute names to wire (JSON) keys and msrest type strings.
    _attribute_map = {
        'enable_output_to_file_based_on_data_type_id': {'key': 'enableOutputToFileBasedOnDataTypeId', 'type': 'bool'},
        'aml_compute_priority_internal': {'key': 'amlComputePriorityInternal', 'type': 'AetherPriorityConfiguration'},
        'itp_priority_internal': {'key': 'itpPriorityInternal', 'type': 'AetherPriorityConfiguration'},
        'singularity_priority_internal': {'key': 'singularityPriorityInternal', 'type': 'AetherPriorityConfiguration'},
        'environment': {'key': 'environment', 'type': 'AetherEnvironmentConfiguration'},
        'hyper_drive_configuration': {'key': 'hyperDriveConfiguration', 'type': 'AetherHyperDriveConfiguration'},
        'k8_s_config': {'key': 'k8sConfig', 'type': 'AetherK8SConfiguration'},
        'resource_config': {'key': 'resourceConfig', 'type': 'AetherResourceConfiguration'},
        'torch_distributed_config': {'key': 'torchDistributedConfig', 'type': 'AetherTorchDistributedConfiguration'},
        'target_selector_config': {'key': 'targetSelectorConfig', 'type': 'AetherTargetSelectorConfiguration'},
        'docker_config': {'key': 'dockerConfig', 'type': 'AetherDockerSettingConfiguration'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'int'},
        'identity': {'key': 'identity', 'type': 'AetherIdentitySetting'},
        'application_endpoints': {'key': 'applicationEndpoints', 'type': '{ApplicationEndpointConfiguration}'},
        'run_config': {'key': 'runConfig', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword enable_output_to_file_based_on_data_type_id:
        :paramtype enable_output_to_file_based_on_data_type_id: bool
        :keyword aml_compute_priority_internal:
        :paramtype aml_compute_priority_internal: ~flow.models.AetherPriorityConfiguration
        :keyword itp_priority_internal:
        :paramtype itp_priority_internal: ~flow.models.AetherPriorityConfiguration
        :keyword singularity_priority_internal:
        :paramtype singularity_priority_internal: ~flow.models.AetherPriorityConfiguration
        :keyword environment:
        :paramtype environment: ~flow.models.AetherEnvironmentConfiguration
        :keyword hyper_drive_configuration:
        :paramtype hyper_drive_configuration: ~flow.models.AetherHyperDriveConfiguration
        :keyword k8_s_config:
        :paramtype k8_s_config: ~flow.models.AetherK8SConfiguration
        :keyword resource_config:
        :paramtype resource_config: ~flow.models.AetherResourceConfiguration
        :keyword torch_distributed_config:
        :paramtype torch_distributed_config: ~flow.models.AetherTorchDistributedConfiguration
        :keyword target_selector_config:
        :paramtype target_selector_config: ~flow.models.AetherTargetSelectorConfiguration
        :keyword docker_config:
        :paramtype docker_config: ~flow.models.AetherDockerSettingConfiguration
        :keyword environment_variables: Dictionary of :code:`<string>`.
        :paramtype environment_variables: dict[str, str]
        :keyword max_run_duration_seconds:
        :paramtype max_run_duration_seconds: int
        :keyword identity:
        :paramtype identity: ~flow.models.AetherIdentitySetting
        :keyword application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
        :paramtype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
        :keyword run_config:
        :paramtype run_config: str
        """
        super(AetherEsCloudConfiguration, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.enable_output_to_file_based_on_data_type_id = kwargs.get('enable_output_to_file_based_on_data_type_id', None)
        self.aml_compute_priority_internal = kwargs.get('aml_compute_priority_internal', None)
        self.itp_priority_internal = kwargs.get('itp_priority_internal', None)
        self.singularity_priority_internal = kwargs.get('singularity_priority_internal', None)
        self.environment = kwargs.get('environment', None)
        self.hyper_drive_configuration = kwargs.get('hyper_drive_configuration', None)
        self.k8_s_config = kwargs.get('k8_s_config', None)
        self.resource_config = kwargs.get('resource_config', None)
        self.torch_distributed_config = kwargs.get('torch_distributed_config', None)
        self.target_selector_config = kwargs.get('target_selector_config', None)
        self.docker_config = kwargs.get('docker_config', None)
        self.environment_variables = kwargs.get('environment_variables', None)
        self.max_run_duration_seconds = kwargs.get('max_run_duration_seconds', None)
        self.identity = kwargs.get('identity', None)
        self.application_endpoints = kwargs.get('application_endpoints', None)
        self.run_config = kwargs.get('run_config', None)


# AutoRest-generated msrest model: an export-data task wrapping its data-transfer sink.
class AetherExportDataTask(msrest.serialization.Model):
    """AetherExportDataTask.

    :ivar data_transfer_sink:
    :vartype data_transfer_sink: ~flow.models.AetherDataTransferSink
    """

    # NOTE(review): PascalCase wire key, mirroring the service schema.
    _attribute_map = {
        'data_transfer_sink': {'key': 'DataTransferSink', 'type': 'AetherDataTransferSink'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_transfer_sink:
        :paramtype data_transfer_sink: ~flow.models.AetherDataTransferSink
        """
        super(AetherExportDataTask, self).__init__(**kwargs)
        self.data_transfer_sink = kwargs.get('data_transfer_sink', None)


# AutoRest-generated msrest model: AutoML featurization settings (mode, blocked
# transformers, per-column purposes and transformer parameters).
class AetherFeaturizationSettings(msrest.serialization.Model):
    """AetherFeaturizationSettings.
    :ivar mode: Possible values include: "Auto", "Custom", "Off".
    :vartype mode: str or ~flow.models.AetherFeaturizationMode
    :ivar blocked_transformers:
    :vartype blocked_transformers: list[str]
    :ivar column_purposes: Dictionary of :code:`<string>`.
    :vartype column_purposes: dict[str, str]
    :ivar drop_columns:
    :vartype drop_columns: list[str]
    :ivar transformer_params: Dictionary of
     <components·1y90i4m·schemas·aetherfeaturizationsettings·properties·transformerparams·additionalproperties>.
    :vartype transformer_params: dict[str, list[~flow.models.AetherColumnTransformer]]
    :ivar dataset_language:
    :vartype dataset_language: str
    :ivar enable_dnn_featurization:
    :vartype enable_dnn_featurization: bool
    """

    # Maps Python attribute names to wire (JSON) keys and msrest type strings.
    _attribute_map = {
        'mode': {'key': 'mode', 'type': 'str'},
        'blocked_transformers': {'key': 'blockedTransformers', 'type': '[str]'},
        'column_purposes': {'key': 'columnPurposes', 'type': '{str}'},
        'drop_columns': {'key': 'dropColumns', 'type': '[str]'},
        'transformer_params': {'key': 'transformerParams', 'type': '{[AetherColumnTransformer]}'},
        'dataset_language': {'key': 'datasetLanguage', 'type': 'str'},
        'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword mode: Possible values include: "Auto", "Custom", "Off".
        :paramtype mode: str or ~flow.models.AetherFeaturizationMode
        :keyword blocked_transformers:
        :paramtype blocked_transformers: list[str]
        :keyword column_purposes: Dictionary of :code:`<string>`.
        :paramtype column_purposes: dict[str, str]
        :keyword drop_columns:
        :paramtype drop_columns: list[str]
        :keyword transformer_params: Dictionary of
         <components·1y90i4m·schemas·aetherfeaturizationsettings·properties·transformerparams·additionalproperties>.
        :paramtype transformer_params: dict[str, list[~flow.models.AetherColumnTransformer]]
        :keyword dataset_language:
        :paramtype dataset_language: str
        :keyword enable_dnn_featurization:
        :paramtype enable_dnn_featurization: bool
        """
        super(AetherFeaturizationSettings, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.mode = kwargs.get('mode', None)
        self.blocked_transformers = kwargs.get('blocked_transformers', None)
        self.column_purposes = kwargs.get('column_purposes', None)
        self.drop_columns = kwargs.get('drop_columns', None)
        self.transformer_params = kwargs.get('transformer_params', None)
        self.dataset_language = kwargs.get('dataset_language', None)
        self.enable_dnn_featurization = kwargs.get('enable_dnn_featurization', None)


# AutoRest-generated msrest model: a file-system endpoint (connection + path).
class AetherFileSystem(msrest.serialization.Model):
    """AetherFileSystem.

    :ivar connection:
    :vartype connection: str
    :ivar path:
    :vartype path: str
    """

    _attribute_map = {
        'connection': {'key': 'connection', 'type': 'str'},
        'path': {'key': 'path', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword connection:
        :paramtype connection: str
        :keyword path:
        :paramtype path: str
        """
        super(AetherFileSystem, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.connection = kwargs.get('connection', None)
        self.path = kwargs.get('path', None)


# AutoRest-generated msrest model: forecast horizon — automatic, or an explicit
# value when mode is "Custom".
class AetherForecastHorizon(msrest.serialization.Model):
    """AetherForecastHorizon.

    :ivar mode: Possible values include: "Auto", "Custom".
    :vartype mode: str or ~flow.models.AetherForecastHorizonMode
    :ivar value:
    :vartype value: int
    """

    _attribute_map = {
        'mode': {'key': 'mode', 'type': 'str'},
        'value': {'key': 'value', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword mode: Possible values include: "Auto", "Custom".
        :paramtype mode: str or ~flow.models.AetherForecastHorizonMode
        :keyword value:
        :paramtype value: int
        """
        super(AetherForecastHorizon, self).__init__(**kwargs)
        self.mode = kwargs.get('mode', None)
        self.value = kwargs.get('value', None)


# AutoRest-generated msrest model: AutoML forecasting task settings (time column,
# lags, horizon, seasonality, series handling, and related options).
class AetherForecastingSettings(msrest.serialization.Model):
    """AetherForecastingSettings.
    :ivar country_or_region_for_holidays:
    :vartype country_or_region_for_holidays: str
    :ivar time_column_name:
    :vartype time_column_name: str
    :ivar target_lags:
    :vartype target_lags: ~flow.models.AetherTargetLags
    :ivar target_rolling_window_size:
    :vartype target_rolling_window_size: ~flow.models.AetherTargetRollingWindowSize
    :ivar forecast_horizon:
    :vartype forecast_horizon: ~flow.models.AetherForecastHorizon
    :ivar time_series_id_column_names:
    :vartype time_series_id_column_names: list[str]
    :ivar frequency:
    :vartype frequency: str
    :ivar feature_lags:
    :vartype feature_lags: str
    :ivar seasonality:
    :vartype seasonality: ~flow.models.AetherSeasonality
    :ivar short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
    :vartype short_series_handling_config: str or
     ~flow.models.AetherShortSeriesHandlingConfiguration
    :ivar use_stl: Possible values include: "Season", "SeasonTrend".
    :vartype use_stl: str or ~flow.models.AetherUseStl
    :ivar target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
    :vartype target_aggregate_function: str or ~flow.models.AetherTargetAggregationFunction
    :ivar cv_step_size:
    :vartype cv_step_size: int
    :ivar features_unknown_at_forecast_time:
    :vartype features_unknown_at_forecast_time: list[str]
    """

    # Maps Python attribute names to wire (JSON) keys and msrest type strings.
    _attribute_map = {
        'country_or_region_for_holidays': {'key': 'countryOrRegionForHolidays', 'type': 'str'},
        'time_column_name': {'key': 'timeColumnName', 'type': 'str'},
        'target_lags': {'key': 'targetLags', 'type': 'AetherTargetLags'},
        'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'AetherTargetRollingWindowSize'},
        'forecast_horizon': {'key': 'forecastHorizon', 'type': 'AetherForecastHorizon'},
        'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'},
        'frequency': {'key': 'frequency', 'type': 'str'},
        'feature_lags': {'key': 'featureLags', 'type': 'str'},
        'seasonality': {'key': 'seasonality', 'type': 'AetherSeasonality'},
        'short_series_handling_config': {'key': 'shortSeriesHandlingConfig', 'type': 'str'},
        'use_stl': {'key': 'useStl', 'type': 'str'},
        'target_aggregate_function': {'key': 'targetAggregateFunction', 'type': 'str'},
        'cv_step_size': {'key': 'cvStepSize', 'type': 'int'},
        'features_unknown_at_forecast_time': {'key': 'featuresUnknownAtForecastTime', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword country_or_region_for_holidays:
        :paramtype country_or_region_for_holidays: str
        :keyword time_column_name:
        :paramtype time_column_name: str
        :keyword target_lags:
        :paramtype target_lags: ~flow.models.AetherTargetLags
        :keyword target_rolling_window_size:
        :paramtype target_rolling_window_size: ~flow.models.AetherTargetRollingWindowSize
        :keyword forecast_horizon:
        :paramtype forecast_horizon: ~flow.models.AetherForecastHorizon
        :keyword time_series_id_column_names:
        :paramtype time_series_id_column_names: list[str]
        :keyword frequency:
        :paramtype frequency: str
        :keyword feature_lags:
        :paramtype feature_lags: str
        :keyword seasonality:
        :paramtype seasonality: ~flow.models.AetherSeasonality
        :keyword short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
        :paramtype short_series_handling_config: str or
         ~flow.models.AetherShortSeriesHandlingConfiguration
        :keyword use_stl: Possible values include: "Season", "SeasonTrend".
        :paramtype use_stl: str or ~flow.models.AetherUseStl
        :keyword target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
        :paramtype target_aggregate_function: str or ~flow.models.AetherTargetAggregationFunction
        :keyword cv_step_size:
        :paramtype cv_step_size: int
        :keyword features_unknown_at_forecast_time:
        :paramtype features_unknown_at_forecast_time: list[str]
        """
        super(AetherForecastingSettings, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.country_or_region_for_holidays = kwargs.get('country_or_region_for_holidays', None)
        self.time_column_name = kwargs.get('time_column_name', None)
        self.target_lags = kwargs.get('target_lags', None)
        self.target_rolling_window_size = kwargs.get('target_rolling_window_size', None)
        self.forecast_horizon = kwargs.get('forecast_horizon', None)
        self.time_series_id_column_names = kwargs.get('time_series_id_column_names', None)
        self.frequency = kwargs.get('frequency', None)
        self.feature_lags = kwargs.get('feature_lags', None)
        self.seasonality = kwargs.get('seasonality', None)
        self.short_series_handling_config = kwargs.get('short_series_handling_config', None)
        self.use_stl = kwargs.get('use_stl', None)
        self.target_aggregate_function = kwargs.get('target_aggregate_function', None)
        self.cv_step_size = kwargs.get('cv_step_size', None)
        self.features_unknown_at_forecast_time = kwargs.get('features_unknown_at_forecast_time', None)


# AutoRest-generated msrest model: top-level AutoML general settings
# (primary metric, task type, log verbosity).
class AetherGeneralSettings(msrest.serialization.Model):
    """AetherGeneralSettings.
:ivar primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError", "NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou". :vartype primary_metric: str or ~flow.models.AetherPrimaryMetrics :ivar task_type: Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER", "TextClassificationMultilabel". :vartype task_type: str or ~flow.models.AetherTaskType :ivar log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error", "Critical". :vartype log_verbosity: str or ~flow.models.AetherLogVerbosity """ _attribute_map = { 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, 'task_type': {'key': 'taskType', 'type': 'str'}, 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError", "NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou". :paramtype primary_metric: str or ~flow.models.AetherPrimaryMetrics :keyword task_type: Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER", "TextClassificationMultilabel". :paramtype task_type: str or ~flow.models.AetherTaskType :keyword log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error", "Critical". 
:paramtype log_verbosity: str or ~flow.models.AetherLogVerbosity """ super(AetherGeneralSettings, self).__init__(**kwargs) self.primary_metric = kwargs.get('primary_metric', None) self.task_type = kwargs.get('task_type', None) self.log_verbosity = kwargs.get('log_verbosity', None) class AetherGlobsOptions(msrest.serialization.Model): """AetherGlobsOptions. :ivar glob_patterns: :vartype glob_patterns: list[str] """ _attribute_map = { 'glob_patterns': {'key': 'globPatterns', 'type': '[str]'}, } def __init__( self, **kwargs ): """ :keyword glob_patterns: :paramtype glob_patterns: list[str] """ super(AetherGlobsOptions, self).__init__(**kwargs) self.glob_patterns = kwargs.get('glob_patterns', None) class AetherGraphControlNode(msrest.serialization.Model): """AetherGraphControlNode. :ivar id: :vartype id: str :ivar control_type: The only acceptable values to pass in are None and "IfElse". The default value is None. :vartype control_type: str :ivar control_parameter: :vartype control_parameter: ~flow.models.AetherParameterAssignment :ivar run_attribution: :vartype run_attribution: str """ _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'control_type': {'key': 'controlType', 'type': 'str'}, 'control_parameter': {'key': 'controlParameter', 'type': 'AetherParameterAssignment'}, 'run_attribution': {'key': 'runAttribution', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword id: :paramtype id: str :keyword control_type: The only acceptable values to pass in are None and "IfElse". The default value is None. 
        :paramtype control_type: str
        :keyword control_parameter:
        :paramtype control_parameter: ~flow.models.AetherParameterAssignment
        :keyword run_attribution:
        :paramtype run_attribution: str
        """
        super(AetherGraphControlNode, self).__init__(**kwargs)
        self.id = kwargs.get('id', None)
        self.control_type = kwargs.get('control_type', None)
        self.control_parameter = kwargs.get('control_parameter', None)
        self.run_attribution = kwargs.get('run_attribution', None)


class AetherGraphControlReferenceNode(msrest.serialization.Model):
    """AetherGraphControlReferenceNode.

    :ivar id:
    :vartype id: str
    :ivar name:
    :vartype name: str
    :ivar comment:
    :vartype comment: str
    :ivar control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
    :vartype control_flow_type: str or ~flow.models.AetherControlFlowType
    :ivar reference_node_id:
    :vartype reference_node_id: str
    :ivar do_while_control_flow_info:
    :vartype do_while_control_flow_info: ~flow.models.AetherDoWhileControlFlowInfo
    :ivar parallel_for_control_flow_info:
    :vartype parallel_for_control_flow_info: ~flow.models.AetherParallelForControlFlowInfo
    :ivar run_attribution:
    :vartype run_attribution: str
    """

    # Attribute-name -> {wire key, msrest type} map used by msrest for (de)serialization.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'comment': {'key': 'comment', 'type': 'str'},
        'control_flow_type': {'key': 'controlFlowType', 'type': 'str'},
        'reference_node_id': {'key': 'referenceNodeId', 'type': 'str'},
        'do_while_control_flow_info': {'key': 'doWhileControlFlowInfo', 'type': 'AetherDoWhileControlFlowInfo'},
        'parallel_for_control_flow_info': {'key': 'parallelForControlFlowInfo', 'type': 'AetherParallelForControlFlowInfo'},
        'run_attribution': {'key': 'runAttribution', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword name:
        :paramtype name: str
        :keyword comment:
        :paramtype comment: str
        :keyword control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
        :paramtype control_flow_type: str or ~flow.models.AetherControlFlowType
        :keyword reference_node_id:
        :paramtype reference_node_id: str
        :keyword do_while_control_flow_info:
        :paramtype do_while_control_flow_info: ~flow.models.AetherDoWhileControlFlowInfo
        :keyword parallel_for_control_flow_info:
        :paramtype parallel_for_control_flow_info: ~flow.models.AetherParallelForControlFlowInfo
        :keyword run_attribution:
        :paramtype run_attribution: str
        """
        super(AetherGraphControlReferenceNode, self).__init__(**kwargs)
        self.id = kwargs.get('id', None)
        self.name = kwargs.get('name', None)
        self.comment = kwargs.get('comment', None)
        self.control_flow_type = kwargs.get('control_flow_type', None)
        self.reference_node_id = kwargs.get('reference_node_id', None)
        self.do_while_control_flow_info = kwargs.get('do_while_control_flow_info', None)
        self.parallel_for_control_flow_info = kwargs.get('parallel_for_control_flow_info', None)
        self.run_attribution = kwargs.get('run_attribution', None)


class AetherGraphDatasetNode(msrest.serialization.Model):
    """AetherGraphDatasetNode.

    :ivar id:
    :vartype id: str
    :ivar dataset_id:
    :vartype dataset_id: str
    :ivar data_path_parameter_name:
    :vartype data_path_parameter_name: str
    :ivar data_set_definition:
    :vartype data_set_definition: ~flow.models.AetherDataSetDefinition
    """

    # Attribute-name -> {wire key, msrest type} map used by msrest for (de)serialization.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'dataset_id': {'key': 'datasetId', 'type': 'str'},
        'data_path_parameter_name': {'key': 'dataPathParameterName', 'type': 'str'},
        'data_set_definition': {'key': 'dataSetDefinition', 'type': 'AetherDataSetDefinition'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword dataset_id:
        :paramtype dataset_id: str
        :keyword data_path_parameter_name:
        :paramtype data_path_parameter_name: str
        :keyword data_set_definition:
        :paramtype data_set_definition: ~flow.models.AetherDataSetDefinition
        """
        super(AetherGraphDatasetNode, self).__init__(**kwargs)
        self.id = kwargs.get('id', None)
        self.dataset_id = kwargs.get('dataset_id', None)
        self.data_path_parameter_name = kwargs.get('data_path_parameter_name', None)
        self.data_set_definition = kwargs.get('data_set_definition', None)


class AetherGraphEdge(msrest.serialization.Model):
    """AetherGraphEdge.

    :ivar source_output_port:
    :vartype source_output_port: ~flow.models.AetherPortInfo
    :ivar destination_input_port:
    :vartype destination_input_port: ~flow.models.AetherPortInfo
    """

    # Attribute-name -> {wire key, msrest type} map used by msrest for (de)serialization.
    _attribute_map = {
        'source_output_port': {'key': 'sourceOutputPort', 'type': 'AetherPortInfo'},
        'destination_input_port': {'key': 'destinationInputPort', 'type': 'AetherPortInfo'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword source_output_port:
        :paramtype source_output_port: ~flow.models.AetherPortInfo
        :keyword destination_input_port:
        :paramtype destination_input_port: ~flow.models.AetherPortInfo
        """
        super(AetherGraphEdge, self).__init__(**kwargs)
        self.source_output_port = kwargs.get('source_output_port', None)
        self.destination_input_port = kwargs.get('destination_input_port', None)


class AetherGraphEntity(msrest.serialization.Model):
    """AetherGraphEntity.

    :ivar module_nodes:
    :vartype module_nodes: list[~flow.models.AetherGraphModuleNode]
    :ivar dataset_nodes:
    :vartype dataset_nodes: list[~flow.models.AetherGraphDatasetNode]
    :ivar sub_graph_nodes:
    :vartype sub_graph_nodes: list[~flow.models.AetherGraphReferenceNode]
    :ivar control_reference_nodes:
    :vartype control_reference_nodes: list[~flow.models.AetherGraphControlReferenceNode]
    :ivar control_nodes:
    :vartype control_nodes: list[~flow.models.AetherGraphControlNode]
    :ivar edges:
    :vartype edges: list[~flow.models.AetherGraphEdge]
    :ivar default_compute:
    :vartype default_compute: ~flow.models.AetherComputeSetting
    :ivar default_datastore:
    :vartype default_datastore: ~flow.models.AetherDatastoreSetting
    :ivar default_cloud_priority:
    :vartype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
    :ivar parent_sub_graph_module_ids:
    :vartype parent_sub_graph_module_ids: list[str]
    :ivar id:
    :vartype id: str
    :ivar workspace_id:
    :vartype workspace_id: str
    :ivar etag:
    :vartype etag: str
    :ivar tags: A set of tags.
    :vartype tags: list[str]
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.AetherEntityStatus
    """

    # Attribute-name -> {wire key, msrest type} map used by msrest for (de)serialization.
    # Dates use the 'iso-8601' msrest type so they round-trip as ISO strings on the wire.
    _attribute_map = {
        'module_nodes': {'key': 'moduleNodes', 'type': '[AetherGraphModuleNode]'},
        'dataset_nodes': {'key': 'datasetNodes', 'type': '[AetherGraphDatasetNode]'},
        'sub_graph_nodes': {'key': 'subGraphNodes', 'type': '[AetherGraphReferenceNode]'},
        'control_reference_nodes': {'key': 'controlReferenceNodes', 'type': '[AetherGraphControlReferenceNode]'},
        'control_nodes': {'key': 'controlNodes', 'type': '[AetherGraphControlNode]'},
        'edges': {'key': 'edges', 'type': '[AetherGraphEdge]'},
        'default_compute': {'key': 'defaultCompute', 'type': 'AetherComputeSetting'},
        'default_datastore': {'key': 'defaultDatastore', 'type': 'AetherDatastoreSetting'},
        'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'AetherCloudPrioritySetting'},
        'parent_sub_graph_module_ids': {'key': 'parentSubGraphModuleIds', 'type': '[str]'},
        'id': {'key': 'id', 'type': 'str'},
        'workspace_id': {'key': 'workspaceId', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '[str]'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword module_nodes:
        :paramtype module_nodes: list[~flow.models.AetherGraphModuleNode]
        :keyword dataset_nodes:
        :paramtype dataset_nodes: list[~flow.models.AetherGraphDatasetNode]
        :keyword sub_graph_nodes:
        :paramtype sub_graph_nodes: list[~flow.models.AetherGraphReferenceNode]
        :keyword control_reference_nodes:
        :paramtype control_reference_nodes: list[~flow.models.AetherGraphControlReferenceNode]
        :keyword control_nodes:
        :paramtype control_nodes: list[~flow.models.AetherGraphControlNode]
        :keyword edges:
        :paramtype edges: list[~flow.models.AetherGraphEdge]
        :keyword default_compute:
        :paramtype default_compute: ~flow.models.AetherComputeSetting
        :keyword default_datastore:
        :paramtype default_datastore: ~flow.models.AetherDatastoreSetting
        :keyword default_cloud_priority:
        :paramtype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
        :keyword parent_sub_graph_module_ids:
        :paramtype parent_sub_graph_module_ids: list[str]
        :keyword id:
        :paramtype id: str
        :keyword workspace_id:
        :paramtype workspace_id: str
        :keyword etag:
        :paramtype etag: str
        :keyword tags: A set of tags.
        :paramtype tags: list[str]
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.AetherEntityStatus
        """
        super(AetherGraphEntity, self).__init__(**kwargs)
        self.module_nodes = kwargs.get('module_nodes', None)
        self.dataset_nodes = kwargs.get('dataset_nodes', None)
        self.sub_graph_nodes = kwargs.get('sub_graph_nodes', None)
        self.control_reference_nodes = kwargs.get('control_reference_nodes', None)
        self.control_nodes = kwargs.get('control_nodes', None)
        self.edges = kwargs.get('edges', None)
        self.default_compute = kwargs.get('default_compute', None)
        self.default_datastore = kwargs.get('default_datastore', None)
        self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
        self.parent_sub_graph_module_ids = kwargs.get('parent_sub_graph_module_ids', None)
        self.id = kwargs.get('id', None)
        self.workspace_id = kwargs.get('workspace_id', None)
        self.etag = kwargs.get('etag', None)
        self.tags = kwargs.get('tags', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)
        self.entity_status = kwargs.get('entity_status', None)


class AetherGraphModuleNode(msrest.serialization.Model):
    """AetherGraphModuleNode.

    :ivar cloud_priority:
    :vartype cloud_priority: int
    :ivar default_data_retention_hint:
    :vartype default_data_retention_hint: int
    :ivar compliance_cluster:
    :vartype compliance_cluster: str
    :ivar euclid_workspace_id:
    :vartype euclid_workspace_id: str
    :ivar attached_modules:
    :vartype attached_modules: list[str]
    :ivar acceptable_machine_clusters:
    :vartype acceptable_machine_clusters: list[str]
    :ivar custom_data_location_id:
    :vartype custom_data_location_id: str
    :ivar alert_timeout_duration:
    :vartype alert_timeout_duration: str
    :ivar runconfig:
    :vartype runconfig: str
    :ivar id:
    :vartype id: str
    :ivar module_id:
    :vartype module_id: str
    :ivar comment:
    :vartype comment: str
    :ivar name:
    :vartype name: str
    :ivar module_parameters:
    :vartype module_parameters: list[~flow.models.AetherParameterAssignment]
    :ivar module_metadata_parameters:
    :vartype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
    :ivar module_output_settings:
    :vartype module_output_settings: list[~flow.models.AetherOutputSetting]
    :ivar module_input_settings:
    :vartype module_input_settings: list[~flow.models.AetherInputSetting]
    :ivar use_graph_default_compute:
    :vartype use_graph_default_compute: bool
    :ivar use_graph_default_datastore:
    :vartype use_graph_default_datastore: bool
    :ivar regenerate_output:
    :vartype regenerate_output: bool
    :ivar control_inputs:
    :vartype control_inputs: list[~flow.models.AetherControlInput]
    :ivar cloud_settings:
    :vartype cloud_settings: ~flow.models.AetherCloudSettings
    :ivar execution_phase: Possible values include: "Execution", "Initialization",
     "Finalization".
    :vartype execution_phase: str or ~flow.models.AetherExecutionPhase
    :ivar run_attribution:
    :vartype run_attribution: str
    """

    # Attribute-name -> {wire key, msrest type} map used by msrest for (de)serialization.
    _attribute_map = {
        'cloud_priority': {'key': 'cloudPriority', 'type': 'int'},
        'default_data_retention_hint': {'key': 'defaultDataRetentionHint', 'type': 'int'},
        'compliance_cluster': {'key': 'complianceCluster', 'type': 'str'},
        'euclid_workspace_id': {'key': 'euclidWorkspaceId', 'type': 'str'},
        'attached_modules': {'key': 'attachedModules', 'type': '[str]'},
        'acceptable_machine_clusters': {'key': 'acceptableMachineClusters', 'type': '[str]'},
        'custom_data_location_id': {'key': 'customDataLocationId', 'type': 'str'},
        'alert_timeout_duration': {'key': 'alertTimeoutDuration', 'type': 'str'},
        'runconfig': {'key': 'runconfig', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'module_id': {'key': 'moduleId', 'type': 'str'},
        'comment': {'key': 'comment', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'module_parameters': {'key': 'moduleParameters', 'type': '[AetherParameterAssignment]'},
        'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[AetherParameterAssignment]'},
        'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[AetherOutputSetting]'},
        'module_input_settings': {'key': 'moduleInputSettings', 'type': '[AetherInputSetting]'},
        'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
        'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
        'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
        'control_inputs': {'key': 'controlInputs', 'type': '[AetherControlInput]'},
        'cloud_settings': {'key': 'cloudSettings', 'type': 'AetherCloudSettings'},
        'execution_phase': {'key': 'executionPhase', 'type': 'str'},
        'run_attribution': {'key': 'runAttribution', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword cloud_priority:
        :paramtype cloud_priority: int
        :keyword default_data_retention_hint:
        :paramtype default_data_retention_hint: int
        :keyword compliance_cluster:
        :paramtype compliance_cluster: str
        :keyword euclid_workspace_id:
        :paramtype euclid_workspace_id: str
        :keyword attached_modules:
        :paramtype attached_modules: list[str]
        :keyword acceptable_machine_clusters:
        :paramtype acceptable_machine_clusters: list[str]
        :keyword custom_data_location_id:
        :paramtype custom_data_location_id: str
        :keyword alert_timeout_duration:
        :paramtype alert_timeout_duration: str
        :keyword runconfig:
        :paramtype runconfig: str
        :keyword id:
        :paramtype id: str
        :keyword module_id:
        :paramtype module_id: str
        :keyword comment:
        :paramtype comment: str
        :keyword name:
        :paramtype name: str
        :keyword module_parameters:
        :paramtype module_parameters: list[~flow.models.AetherParameterAssignment]
        :keyword module_metadata_parameters:
        :paramtype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
        :keyword module_output_settings:
        :paramtype module_output_settings: list[~flow.models.AetherOutputSetting]
        :keyword module_input_settings:
        :paramtype module_input_settings: list[~flow.models.AetherInputSetting]
        :keyword use_graph_default_compute:
        :paramtype use_graph_default_compute: bool
        :keyword use_graph_default_datastore:
        :paramtype use_graph_default_datastore: bool
        :keyword regenerate_output:
        :paramtype regenerate_output: bool
        :keyword control_inputs:
        :paramtype control_inputs: list[~flow.models.AetherControlInput]
        :keyword cloud_settings:
        :paramtype cloud_settings: ~flow.models.AetherCloudSettings
        :keyword execution_phase: Possible values include: "Execution", "Initialization",
         "Finalization".
        :paramtype execution_phase: str or ~flow.models.AetherExecutionPhase
        :keyword run_attribution:
        :paramtype run_attribution: str
        """
        super(AetherGraphModuleNode, self).__init__(**kwargs)
        self.cloud_priority = kwargs.get('cloud_priority', None)
        self.default_data_retention_hint = kwargs.get('default_data_retention_hint', None)
        self.compliance_cluster = kwargs.get('compliance_cluster', None)
        self.euclid_workspace_id = kwargs.get('euclid_workspace_id', None)
        self.attached_modules = kwargs.get('attached_modules', None)
        self.acceptable_machine_clusters = kwargs.get('acceptable_machine_clusters', None)
        self.custom_data_location_id = kwargs.get('custom_data_location_id', None)
        self.alert_timeout_duration = kwargs.get('alert_timeout_duration', None)
        self.runconfig = kwargs.get('runconfig', None)
        self.id = kwargs.get('id', None)
        self.module_id = kwargs.get('module_id', None)
        self.comment = kwargs.get('comment', None)
        self.name = kwargs.get('name', None)
        self.module_parameters = kwargs.get('module_parameters', None)
        self.module_metadata_parameters = kwargs.get('module_metadata_parameters', None)
        self.module_output_settings = kwargs.get('module_output_settings', None)
        self.module_input_settings = kwargs.get('module_input_settings', None)
        self.use_graph_default_compute = kwargs.get('use_graph_default_compute', None)
        self.use_graph_default_datastore = kwargs.get('use_graph_default_datastore', None)
        self.regenerate_output = kwargs.get('regenerate_output', None)
        self.control_inputs = kwargs.get('control_inputs', None)
        self.cloud_settings = kwargs.get('cloud_settings', None)
        self.execution_phase = kwargs.get('execution_phase', None)
        self.run_attribution = kwargs.get('run_attribution', None)


class AetherGraphReferenceNode(msrest.serialization.Model):
    """AetherGraphReferenceNode.

    :ivar graph_id:
    :vartype graph_id: str
    :ivar default_compute:
    :vartype default_compute: ~flow.models.AetherComputeSetting
    :ivar default_datastore:
    :vartype default_datastore: ~flow.models.AetherDatastoreSetting
    :ivar id:
    :vartype id: str
    :ivar module_id:
    :vartype module_id: str
    :ivar comment:
    :vartype comment: str
    :ivar name:
    :vartype name: str
    :ivar module_parameters:
    :vartype module_parameters: list[~flow.models.AetherParameterAssignment]
    :ivar module_metadata_parameters:
    :vartype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
    :ivar module_output_settings:
    :vartype module_output_settings: list[~flow.models.AetherOutputSetting]
    :ivar module_input_settings:
    :vartype module_input_settings: list[~flow.models.AetherInputSetting]
    :ivar use_graph_default_compute:
    :vartype use_graph_default_compute: bool
    :ivar use_graph_default_datastore:
    :vartype use_graph_default_datastore: bool
    :ivar regenerate_output:
    :vartype regenerate_output: bool
    :ivar control_inputs:
    :vartype control_inputs: list[~flow.models.AetherControlInput]
    :ivar cloud_settings:
    :vartype cloud_settings: ~flow.models.AetherCloudSettings
    :ivar execution_phase: Possible values include: "Execution", "Initialization",
     "Finalization".
    :vartype execution_phase: str or ~flow.models.AetherExecutionPhase
    :ivar run_attribution:
    :vartype run_attribution: str
    """

    # Attribute-name -> {wire key, msrest type} map used by msrest for (de)serialization.
    _attribute_map = {
        'graph_id': {'key': 'graphId', 'type': 'str'},
        'default_compute': {'key': 'defaultCompute', 'type': 'AetherComputeSetting'},
        'default_datastore': {'key': 'defaultDatastore', 'type': 'AetherDatastoreSetting'},
        'id': {'key': 'id', 'type': 'str'},
        'module_id': {'key': 'moduleId', 'type': 'str'},
        'comment': {'key': 'comment', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'module_parameters': {'key': 'moduleParameters', 'type': '[AetherParameterAssignment]'},
        'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[AetherParameterAssignment]'},
        'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[AetherOutputSetting]'},
        'module_input_settings': {'key': 'moduleInputSettings', 'type': '[AetherInputSetting]'},
        'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
        'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
        'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
        'control_inputs': {'key': 'controlInputs', 'type': '[AetherControlInput]'},
        'cloud_settings': {'key': 'cloudSettings', 'type': 'AetherCloudSettings'},
        'execution_phase': {'key': 'executionPhase', 'type': 'str'},
        'run_attribution': {'key': 'runAttribution', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword graph_id:
        :paramtype graph_id: str
        :keyword default_compute:
        :paramtype default_compute: ~flow.models.AetherComputeSetting
        :keyword default_datastore:
        :paramtype default_datastore: ~flow.models.AetherDatastoreSetting
        :keyword id:
        :paramtype id: str
        :keyword module_id:
        :paramtype module_id: str
        :keyword comment:
        :paramtype comment: str
        :keyword name:
        :paramtype name: str
        :keyword module_parameters:
        :paramtype module_parameters: list[~flow.models.AetherParameterAssignment]
        :keyword module_metadata_parameters:
        :paramtype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
        :keyword module_output_settings:
        :paramtype module_output_settings: list[~flow.models.AetherOutputSetting]
        :keyword module_input_settings:
        :paramtype module_input_settings: list[~flow.models.AetherInputSetting]
        :keyword use_graph_default_compute:
        :paramtype use_graph_default_compute: bool
        :keyword use_graph_default_datastore:
        :paramtype use_graph_default_datastore: bool
        :keyword regenerate_output:
        :paramtype regenerate_output: bool
        :keyword control_inputs:
        :paramtype control_inputs: list[~flow.models.AetherControlInput]
        :keyword cloud_settings:
        :paramtype cloud_settings: ~flow.models.AetherCloudSettings
        :keyword execution_phase: Possible values include: "Execution", "Initialization",
         "Finalization".
        :paramtype execution_phase: str or ~flow.models.AetherExecutionPhase
        :keyword run_attribution:
        :paramtype run_attribution: str
        """
        super(AetherGraphReferenceNode, self).__init__(**kwargs)
        self.graph_id = kwargs.get('graph_id', None)
        self.default_compute = kwargs.get('default_compute', None)
        self.default_datastore = kwargs.get('default_datastore', None)
        self.id = kwargs.get('id', None)
        self.module_id = kwargs.get('module_id', None)
        self.comment = kwargs.get('comment', None)
        self.name = kwargs.get('name', None)
        self.module_parameters = kwargs.get('module_parameters', None)
        self.module_metadata_parameters = kwargs.get('module_metadata_parameters', None)
        self.module_output_settings = kwargs.get('module_output_settings', None)
        self.module_input_settings = kwargs.get('module_input_settings', None)
        self.use_graph_default_compute = kwargs.get('use_graph_default_compute', None)
        self.use_graph_default_datastore = kwargs.get('use_graph_default_datastore', None)
        self.regenerate_output = kwargs.get('regenerate_output', None)
        self.control_inputs = kwargs.get('control_inputs', None)
        self.cloud_settings = kwargs.get('cloud_settings', None)
        self.execution_phase = kwargs.get('execution_phase', None)
        self.run_attribution = kwargs.get('run_attribution', None)


class AetherHdfsReference(msrest.serialization.Model):
    """AetherHdfsReference.

    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    :ivar relative_path:
    :vartype relative_path: str
    """

    # Attribute-name -> {wire key, msrest type} map used by msrest for (de)serialization.
    _attribute_map = {
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        :keyword relative_path:
        :paramtype relative_path: str
        """
        super(AetherHdfsReference, self).__init__(**kwargs)
        self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
        self.relative_path = kwargs.get('relative_path', None)


class AetherHdiClusterComputeInfo(msrest.serialization.Model):
    """AetherHdiClusterComputeInfo.

    :ivar address:
    :vartype address: str
    :ivar username:
    :vartype username: str
    :ivar password:
    :vartype password: str
    :ivar private_key:
    :vartype private_key: str
    """

    # Attribute-name -> {wire key, msrest type} map used by msrest for (de)serialization.
    # NOTE(review): carries credentials (password/private key) in plain fields — handle with care.
    _attribute_map = {
        'address': {'key': 'address', 'type': 'str'},
        'username': {'key': 'username', 'type': 'str'},
        'password': {'key': 'password', 'type': 'str'},
        'private_key': {'key': 'privateKey', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword address:
        :paramtype address: str
        :keyword username:
        :paramtype username: str
        :keyword password:
        :paramtype password: str
        :keyword private_key:
        :paramtype private_key: str
        """
        super(AetherHdiClusterComputeInfo, self).__init__(**kwargs)
        self.address = kwargs.get('address', None)
        self.username = kwargs.get('username', None)
        self.password = kwargs.get('password', None)
        self.private_key = kwargs.get('private_key', None)


class AetherHdiRunConfiguration(msrest.serialization.Model):
    """AetherHdiRunConfiguration.

    :ivar file:
    :vartype file: str
    :ivar class_name:
    :vartype class_name: str
    :ivar files:
    :vartype files: list[str]
    :ivar archives:
    :vartype archives: list[str]
    :ivar jars:
    :vartype jars: list[str]
    :ivar py_files:
    :vartype py_files: list[str]
    :ivar compute_name:
    :vartype compute_name: str
    :ivar queue:
    :vartype queue: str
    :ivar driver_memory:
    :vartype driver_memory: str
    :ivar driver_cores:
    :vartype driver_cores: int
    :ivar executor_memory:
    :vartype executor_memory: str
    :ivar executor_cores:
    :vartype executor_cores: int
    :ivar number_executors:
    :vartype number_executors: int
    :ivar conf: Dictionary of :code:`<string>`.
    :vartype conf: dict[str, str]
    :ivar name:
    :vartype name: str
    """

    # Attribute-name -> {wire key, msrest type} map used by msrest for (de)serialization.
    _attribute_map = {
        'file': {'key': 'file', 'type': 'str'},
        'class_name': {'key': 'className', 'type': 'str'},
        'files': {'key': 'files', 'type': '[str]'},
        'archives': {'key': 'archives', 'type': '[str]'},
        'jars': {'key': 'jars', 'type': '[str]'},
        'py_files': {'key': 'pyFiles', 'type': '[str]'},
        'compute_name': {'key': 'computeName', 'type': 'str'},
        'queue': {'key': 'queue', 'type': 'str'},
        'driver_memory': {'key': 'driverMemory', 'type': 'str'},
        'driver_cores': {'key': 'driverCores', 'type': 'int'},
        'executor_memory': {'key': 'executorMemory', 'type': 'str'},
        'executor_cores': {'key': 'executorCores', 'type': 'int'},
        'number_executors': {'key': 'numberExecutors', 'type': 'int'},
        'conf': {'key': 'conf', 'type': '{str}'},
        'name': {'key': 'name', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword file:
        :paramtype file: str
        :keyword class_name:
        :paramtype class_name: str
        :keyword files:
        :paramtype files: list[str]
        :keyword archives:
        :paramtype archives: list[str]
        :keyword jars:
        :paramtype jars: list[str]
        :keyword py_files:
        :paramtype py_files: list[str]
        :keyword compute_name:
        :paramtype compute_name: str
        :keyword queue:
        :paramtype queue: str
        :keyword driver_memory:
        :paramtype driver_memory: str
        :keyword driver_cores:
        :paramtype driver_cores: int
        :keyword executor_memory:
        :paramtype executor_memory: str
        :keyword executor_cores:
        :paramtype executor_cores: int
        :keyword number_executors:
        :paramtype number_executors: int
        :keyword conf: Dictionary of :code:`<string>`.
        :paramtype conf: dict[str, str]
        :keyword name:
        :paramtype name: str
        """
        super(AetherHdiRunConfiguration, self).__init__(**kwargs)
        self.file = kwargs.get('file', None)
        self.class_name = kwargs.get('class_name', None)
        self.files = kwargs.get('files', None)
        self.archives = kwargs.get('archives', None)
        self.jars = kwargs.get('jars', None)
        self.py_files = kwargs.get('py_files', None)
        self.compute_name = kwargs.get('compute_name', None)
        self.queue = kwargs.get('queue', None)
        self.driver_memory = kwargs.get('driver_memory', None)
        self.driver_cores = kwargs.get('driver_cores', None)
        self.executor_memory = kwargs.get('executor_memory', None)
        self.executor_cores = kwargs.get('executor_cores', None)
        self.number_executors = kwargs.get('number_executors', None)
        self.conf = kwargs.get('conf', None)
        self.name = kwargs.get('name', None)


class AetherHyperDriveConfiguration(msrest.serialization.Model):
    """AetherHyperDriveConfiguration.

:ivar hyper_drive_run_config: :vartype hyper_drive_run_config: str :ivar primary_metric_goal: :vartype primary_metric_goal: str :ivar primary_metric_name: :vartype primary_metric_name: str :ivar arguments: :vartype arguments: list[~flow.models.AetherArgumentAssignment] """ _attribute_map = { 'hyper_drive_run_config': {'key': 'hyperDriveRunConfig', 'type': 'str'}, 'primary_metric_goal': {'key': 'primaryMetricGoal', 'type': 'str'}, 'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'}, 'arguments': {'key': 'arguments', 'type': '[AetherArgumentAssignment]'}, } def __init__( self, **kwargs ): """ :keyword hyper_drive_run_config: :paramtype hyper_drive_run_config: str :keyword primary_metric_goal: :paramtype primary_metric_goal: str :keyword primary_metric_name: :paramtype primary_metric_name: str :keyword arguments: :paramtype arguments: list[~flow.models.AetherArgumentAssignment] """ super(AetherHyperDriveConfiguration, self).__init__(**kwargs) self.hyper_drive_run_config = kwargs.get('hyper_drive_run_config', None) self.primary_metric_goal = kwargs.get('primary_metric_goal', None) self.primary_metric_name = kwargs.get('primary_metric_name', None) self.arguments = kwargs.get('arguments', None) class AetherIdentitySetting(msrest.serialization.Model): """AetherIdentitySetting. :ivar type: Possible values include: "UserIdentity", "Managed", "AMLToken". :vartype type: str or ~flow.models.AetherIdentityType :ivar client_id: :vartype client_id: str :ivar object_id: :vartype object_id: str :ivar msi_resource_id: :vartype msi_resource_id: str """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'client_id': {'key': 'clientId', 'type': 'str'}, 'object_id': {'key': 'objectId', 'type': 'str'}, 'msi_resource_id': {'key': 'msiResourceId', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword type: Possible values include: "UserIdentity", "Managed", "AMLToken". 
        :paramtype type: str or ~flow.models.AetherIdentityType
        :keyword client_id:
        :paramtype client_id: str
        :keyword object_id:
        :paramtype object_id: str
        :keyword msi_resource_id:
        :paramtype msi_resource_id: str
        """
        super(AetherIdentitySetting, self).__init__(**kwargs)
        self.type = kwargs.get('type', None)
        self.client_id = kwargs.get('client_id', None)
        self.object_id = kwargs.get('object_id', None)
        self.msi_resource_id = kwargs.get('msi_resource_id', None)


# AutoRest-generated msrest model; manual edits are lost on regeneration (see file header).
class AetherImportDataTask(msrest.serialization.Model):
    """AetherImportDataTask.

    :ivar data_transfer_source:
    :vartype data_transfer_source: ~flow.models.AetherDataTransferSource
    """

    # NOTE: the wire key 'DataTransferSource' is PascalCase unlike most other fields
    # in this module — preserved exactly as the service contract defines it.
    _attribute_map = {
        'data_transfer_source': {'key': 'DataTransferSource', 'type': 'AetherDataTransferSource'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_transfer_source:
        :paramtype data_transfer_source: ~flow.models.AetherDataTransferSource
        """
        super(AetherImportDataTask, self).__init__(**kwargs)
        self.data_transfer_source = kwargs.get('data_transfer_source', None)


# AutoRest-generated msrest model; manual edits are lost on regeneration (see file header).
class AetherInputSetting(msrest.serialization.Model):
    """AetherInputSetting.

    :ivar name:
    :vartype name: str
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar options: This is a dictionary.
    :vartype options: dict[str, str]
    :ivar additional_transformations:
    :vartype additional_transformations: str
    """

    # Maps each Python attribute to its REST wire name and msrest serialization type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'options': {'key': 'options', 'type': '{str}'},
        'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword options: This is a dictionary.
        :paramtype options: dict[str, str]
        :keyword additional_transformations:
        :paramtype additional_transformations: str
        """
        super(AetherInputSetting, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.data_store_mode = kwargs.get('data_store_mode', None)
        self.path_on_compute = kwargs.get('path_on_compute', None)
        self.options = kwargs.get('options', None)
        self.additional_transformations = kwargs.get('additional_transformations', None)


# AutoRest-generated msrest model; manual edits are lost on regeneration (see file header).
class AetherInteractiveConfig(msrest.serialization.Model):
    """AetherInteractiveConfig.

    :ivar is_ssh_enabled:
    :vartype is_ssh_enabled: bool
    :ivar ssh_public_key:
    :vartype ssh_public_key: str
    :ivar is_i_python_enabled:
    :vartype is_i_python_enabled: bool
    :ivar is_tensor_board_enabled:
    :vartype is_tensor_board_enabled: bool
    :ivar interactive_port:
    :vartype interactive_port: int
    """

    _attribute_map = {
        'is_ssh_enabled': {'key': 'isSSHEnabled', 'type': 'bool'},
        'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
        'is_i_python_enabled': {'key': 'isIPythonEnabled', 'type': 'bool'},
        'is_tensor_board_enabled': {'key': 'isTensorBoardEnabled', 'type': 'bool'},
        'interactive_port': {'key': 'interactivePort', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword is_ssh_enabled:
        :paramtype is_ssh_enabled: bool
        :keyword ssh_public_key:
        :paramtype ssh_public_key: str
        :keyword is_i_python_enabled:
        :paramtype is_i_python_enabled: bool
        :keyword is_tensor_board_enabled:
        :paramtype is_tensor_board_enabled: bool
        :keyword interactive_port:
        :paramtype interactive_port: int
        """
        super(AetherInteractiveConfig, self).__init__(**kwargs)
        self.is_ssh_enabled = kwargs.get('is_ssh_enabled', None)
        self.ssh_public_key = kwargs.get('ssh_public_key', None)
        self.is_i_python_enabled = kwargs.get('is_i_python_enabled', None)
        self.is_tensor_board_enabled = kwargs.get('is_tensor_board_enabled', None)
        self.interactive_port = kwargs.get('interactive_port', None)


# AutoRest-generated msrest model; manual edits are lost on regeneration (see file header).
class AetherK8SConfiguration(msrest.serialization.Model):
    """AetherK8SConfiguration.
    :ivar max_retry_count:
    :vartype max_retry_count: int
    :ivar resource_configuration:
    :vartype resource_configuration: ~flow.models.AetherResourceConfig
    :ivar priority_configuration:
    :vartype priority_configuration: ~flow.models.AetherPriorityConfig
    :ivar interactive_configuration:
    :vartype interactive_configuration: ~flow.models.AetherInteractiveConfig
    """

    # Maps each Python attribute to its REST wire name and msrest serialization type.
    _attribute_map = {
        'max_retry_count': {'key': 'maxRetryCount', 'type': 'int'},
        'resource_configuration': {'key': 'resourceConfiguration', 'type': 'AetherResourceConfig'},
        'priority_configuration': {'key': 'priorityConfiguration', 'type': 'AetherPriorityConfig'},
        'interactive_configuration': {'key': 'interactiveConfiguration', 'type': 'AetherInteractiveConfig'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword max_retry_count:
        :paramtype max_retry_count: int
        :keyword resource_configuration:
        :paramtype resource_configuration: ~flow.models.AetherResourceConfig
        :keyword priority_configuration:
        :paramtype priority_configuration: ~flow.models.AetherPriorityConfig
        :keyword interactive_configuration:
        :paramtype interactive_configuration: ~flow.models.AetherInteractiveConfig
        """
        super(AetherK8SConfiguration, self).__init__(**kwargs)
        self.max_retry_count = kwargs.get('max_retry_count', None)
        self.resource_configuration = kwargs.get('resource_configuration', None)
        self.priority_configuration = kwargs.get('priority_configuration', None)
        self.interactive_configuration = kwargs.get('interactive_configuration', None)


# AutoRest-generated msrest model; manual edits are lost on regeneration (see file header).
class AetherLegacyDataPath(msrest.serialization.Model):
    """AetherLegacyDataPath.

    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
    :ivar relative_path:
    :vartype relative_path: str
    """

    _attribute_map = {
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
        :keyword relative_path:
        :paramtype relative_path: str
        """
        super(AetherLegacyDataPath, self).__init__(**kwargs)
        self.data_store_name = kwargs.get('data_store_name', None)
        self.data_store_mode = kwargs.get('data_store_mode', None)
        self.relative_path = kwargs.get('relative_path', None)


# AutoRest-generated msrest model; manual edits are lost on regeneration (see file header).
class AetherLimitSettings(msrest.serialization.Model):
    """AetherLimitSettings.

    :ivar max_trials:
    :vartype max_trials: int
    :ivar timeout:
    :vartype timeout: str
    :ivar trial_timeout:
    :vartype trial_timeout: str
    :ivar max_concurrent_trials:
    :vartype max_concurrent_trials: int
    :ivar max_cores_per_trial:
    :vartype max_cores_per_trial: int
    :ivar exit_score:
    :vartype exit_score: float
    :ivar enable_early_termination:
    :vartype enable_early_termination: bool
    :ivar max_nodes:
    :vartype max_nodes: int
    """

    _attribute_map = {
        'max_trials': {'key': 'maxTrials', 'type': 'int'},
        'timeout': {'key': 'timeout', 'type': 'str'},
        'trial_timeout': {'key': 'trialTimeout', 'type': 'str'},
        'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
        'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'},
        'exit_score': {'key': 'exitScore', 'type': 'float'},
        'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'},
        'max_nodes': {'key': 'maxNodes', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword max_trials:
        :paramtype max_trials: int
        :keyword timeout:
        :paramtype timeout: str
        :keyword trial_timeout:
        :paramtype trial_timeout: str
        :keyword max_concurrent_trials:
        :paramtype max_concurrent_trials: int
        :keyword max_cores_per_trial:
        :paramtype max_cores_per_trial: int
        :keyword exit_score:
        :paramtype exit_score: float
        :keyword enable_early_termination:
        :paramtype enable_early_termination: bool
        :keyword max_nodes:
        :paramtype max_nodes: int
        """
        super(AetherLimitSettings, self).__init__(**kwargs)
        self.max_trials = kwargs.get('max_trials', None)
        self.timeout = kwargs.get('timeout', None)
        self.trial_timeout = kwargs.get('trial_timeout', None)
        self.max_concurrent_trials = kwargs.get('max_concurrent_trials', None)
        self.max_cores_per_trial = kwargs.get('max_cores_per_trial', None)
        self.exit_score = kwargs.get('exit_score', None)
        self.enable_early_termination = kwargs.get('enable_early_termination', None)
        self.max_nodes = kwargs.get('max_nodes', None)


# AutoRest-generated msrest model; manual edits are lost on regeneration (see file header).
class AetherMlcComputeInfo(msrest.serialization.Model):
    """AetherMlcComputeInfo.

    :ivar mlc_compute_type:
    :vartype mlc_compute_type: str
    """

    _attribute_map = {
        'mlc_compute_type': {'key': 'mlcComputeType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword mlc_compute_type:
        :paramtype mlc_compute_type: str
        """
        super(AetherMlcComputeInfo, self).__init__(**kwargs)
        self.mlc_compute_type = kwargs.get('mlc_compute_type', None)


# AutoRest-generated msrest model; manual edits are lost on regeneration (see file header).
class AetherModuleEntity(msrest.serialization.Model):
    """AetherModuleEntity.

    :ivar last_updated_by:
    :vartype last_updated_by: ~flow.models.AetherCreatedBy
    :ivar display_name:
    :vartype display_name: str
    :ivar module_execution_type:
    :vartype module_execution_type: str
    :ivar module_type: Possible values include: "None", "BatchInferencing".
    :vartype module_type: str or ~flow.models.AetherModuleType
    :ivar module_type_version:
    :vartype module_type_version: str
    :ivar resource_requirements:
    :vartype resource_requirements: ~flow.models.AetherResourceModel
    :ivar machine_cluster:
    :vartype machine_cluster: list[str]
    :ivar default_compliance_cluster:
    :vartype default_compliance_cluster: str
    :ivar repository_type: Possible values include: "None", "Other", "Git", "SourceDepot",
     "Cosmos".
    :vartype repository_type: str or ~flow.models.AetherRepositoryType
    :ivar relative_path_to_source_code:
    :vartype relative_path_to_source_code: str
    :ivar commit_id:
    :vartype commit_id: str
    :ivar code_review_link:
    :vartype code_review_link: str
    :ivar unit_tests_available:
    :vartype unit_tests_available: bool
    :ivar is_compressed:
    :vartype is_compressed: bool
    :ivar execution_environment: Possible values include: "ExeWorkerMachine",
     "DockerContainerWithoutNetwork", "DockerContainerWithNetwork", "HyperVWithoutNetwork",
     "HyperVWithNetwork".
    :vartype execution_environment: str or ~flow.models.AetherExecutionEnvironment
    :ivar is_output_markup_enabled:
    :vartype is_output_markup_enabled: bool
    :ivar docker_image_id:
    :vartype docker_image_id: str
    :ivar docker_image_reference:
    :vartype docker_image_reference: str
    :ivar docker_image_security_groups:
    :vartype docker_image_security_groups: str
    :ivar extended_properties:
    :vartype extended_properties: ~flow.models.AetherModuleExtendedProperties
    :ivar deployment_source: Possible values include: "Client", "AutoDeployment", "Vsts".
    :vartype deployment_source: str or ~flow.models.AetherModuleDeploymentSource
    :ivar deployment_source_metadata:
    :vartype deployment_source_metadata: str
    :ivar identifier_hash:
    :vartype identifier_hash: str
    :ivar identifier_hash_v2:
    :vartype identifier_hash_v2: str
    :ivar kv_tags: This is a dictionary.
    :vartype kv_tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar created_by:
    :vartype created_by: ~flow.models.AetherCreatedBy
    :ivar runconfig:
    :vartype runconfig: str
    :ivar cloud_settings:
    :vartype cloud_settings: ~flow.models.AetherCloudSettings
    :ivar category:
    :vartype category: str
    :ivar step_type:
    :vartype step_type: str
    :ivar stage:
    :vartype stage: str
    :ivar upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed".
    :vartype upload_state: str or ~flow.models.AetherUploadState
    :ivar source_code_location:
    :vartype source_code_location: str
    :ivar size_in_bytes:
    :vartype size_in_bytes: long
    :ivar download_location:
    :vartype download_location: str
    :ivar data_location:
    :vartype data_location: ~flow.models.AetherDataLocation
    :ivar scripting_runtime_id:
    :vartype scripting_runtime_id: str
    :ivar interface_documentation:
    :vartype interface_documentation: ~flow.models.AetherEntityInterfaceDocumentation
    :ivar is_eyes_on:
    :vartype is_eyes_on: bool
    :ivar compliance_cluster:
    :vartype compliance_cluster: str
    :ivar is_deterministic:
    :vartype is_deterministic: bool
    :ivar information_url:
    :vartype information_url: str
    :ivar is_experiment_id_in_parameters:
    :vartype is_experiment_id_in_parameters: bool
    :ivar interface_string:
    :vartype interface_string: str
    :ivar default_parameters: This is a dictionary.
    :vartype default_parameters: dict[str, str]
    :ivar structured_interface:
    :vartype structured_interface: ~flow.models.AetherStructuredInterface
    :ivar family_id:
    :vartype family_id: str
    :ivar name:
    :vartype name: str
    :ivar hash:
    :vartype hash: str
    :ivar description:
    :vartype description: str
    :ivar version:
    :vartype version: str
    :ivar sequence_number_in_family:
    :vartype sequence_number_in_family: int
    :ivar owner:
    :vartype owner: str
    :ivar azure_tenant_id:
    :vartype azure_tenant_id: str
    :ivar azure_user_id:
    :vartype azure_user_id: str
    :ivar collaborators:
    :vartype collaborators: list[str]
    :ivar id:
    :vartype id: str
    :ivar workspace_id:
    :vartype workspace_id: str
    :ivar etag:
    :vartype etag: str
    :ivar tags: A set of tags.
    :vartype tags: list[str]
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.AetherEntityStatus
    """

    # Maps each Python attribute to its REST wire name and msrest serialization type.
    # Dates use 'iso-8601'; 'long' is msrest's 64-bit integer wire type.
    _attribute_map = {
        'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'AetherCreatedBy'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'module_execution_type': {'key': 'moduleExecutionType', 'type': 'str'},
        'module_type': {'key': 'moduleType', 'type': 'str'},
        'module_type_version': {'key': 'moduleTypeVersion', 'type': 'str'},
        'resource_requirements': {'key': 'resourceRequirements', 'type': 'AetherResourceModel'},
        'machine_cluster': {'key': 'machineCluster', 'type': '[str]'},
        'default_compliance_cluster': {'key': 'defaultComplianceCluster', 'type': 'str'},
        'repository_type': {'key': 'repositoryType', 'type': 'str'},
        'relative_path_to_source_code': {'key': 'relativePathToSourceCode', 'type': 'str'},
        'commit_id': {'key': 'commitId', 'type': 'str'},
        'code_review_link': {'key': 'codeReviewLink', 'type': 'str'},
        'unit_tests_available': {'key': 'unitTestsAvailable', 'type': 'bool'},
        'is_compressed': {'key': 'isCompressed', 'type': 'bool'},
        'execution_environment': {'key': 'executionEnvironment', 'type': 'str'},
        'is_output_markup_enabled': {'key': 'isOutputMarkupEnabled', 'type': 'bool'},
        'docker_image_id': {'key': 'dockerImageId', 'type': 'str'},
        'docker_image_reference': {'key': 'dockerImageReference', 'type': 'str'},
        'docker_image_security_groups': {'key': 'dockerImageSecurityGroups', 'type': 'str'},
        'extended_properties': {'key': 'extendedProperties', 'type': 'AetherModuleExtendedProperties'},
        'deployment_source': {'key': 'deploymentSource', 'type': 'str'},
        'deployment_source_metadata': {'key': 'deploymentSourceMetadata', 'type': 'str'},
        'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
        'identifier_hash_v2': {'key': 'identifierHashV2', 'type': 'str'},
        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'created_by': {'key': 'createdBy', 'type': 'AetherCreatedBy'},
        'runconfig': {'key': 'runconfig', 'type': 'str'},
        'cloud_settings': {'key': 'cloudSettings', 'type': 'AetherCloudSettings'},
        'category': {'key': 'category', 'type': 'str'},
        'step_type': {'key': 'stepType', 'type': 'str'},
        'stage': {'key': 'stage', 'type': 'str'},
        'upload_state': {'key': 'uploadState', 'type': 'str'},
        'source_code_location': {'key': 'sourceCodeLocation', 'type': 'str'},
        'size_in_bytes': {'key': 'sizeInBytes', 'type': 'long'},
        'download_location': {'key': 'downloadLocation', 'type': 'str'},
        'data_location': {'key': 'dataLocation', 'type': 'AetherDataLocation'},
        'scripting_runtime_id': {'key': 'scriptingRuntimeId', 'type': 'str'},
        'interface_documentation': {'key': 'interfaceDocumentation', 'type': 'AetherEntityInterfaceDocumentation'},
        'is_eyes_on': {'key': 'isEyesOn', 'type': 'bool'},
        'compliance_cluster': {'key': 'complianceCluster', 'type': 'str'},
        'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
        'information_url': {'key': 'informationUrl', 'type': 'str'},
        'is_experiment_id_in_parameters': {'key': 'isExperimentIdInParameters', 'type': 'bool'},
        'interface_string': {'key': 'interfaceString', 'type': 'str'},
        'default_parameters': {'key': 'defaultParameters', 'type': '{str}'},
        'structured_interface': {'key': 'structuredInterface', 'type': 'AetherStructuredInterface'},
        'family_id': {'key': 'familyId', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'hash': {'key': 'hash', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'sequence_number_in_family': {'key': 'sequenceNumberInFamily', 'type': 'int'},
        'owner': {'key': 'owner', 'type': 'str'},
        'azure_tenant_id': {'key': 'azureTenantId', 'type': 'str'},
        'azure_user_id': {'key': 'azureUserId', 'type': 'str'},
        'collaborators': {'key': 'collaborators', 'type': '[str]'},
        'id': {'key': 'id', 'type': 'str'},
        'workspace_id': {'key': 'workspaceId', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '[str]'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword last_updated_by:
        :paramtype last_updated_by: ~flow.models.AetherCreatedBy
        :keyword display_name:
        :paramtype display_name: str
        :keyword module_execution_type:
        :paramtype module_execution_type: str
        :keyword module_type: Possible values include: "None", "BatchInferencing".
        :paramtype module_type: str or ~flow.models.AetherModuleType
        :keyword module_type_version:
        :paramtype module_type_version: str
        :keyword resource_requirements:
        :paramtype resource_requirements: ~flow.models.AetherResourceModel
        :keyword machine_cluster:
        :paramtype machine_cluster: list[str]
        :keyword default_compliance_cluster:
        :paramtype default_compliance_cluster: str
        :keyword repository_type: Possible values include: "None", "Other", "Git", "SourceDepot",
         "Cosmos".
        :paramtype repository_type: str or ~flow.models.AetherRepositoryType
        :keyword relative_path_to_source_code:
        :paramtype relative_path_to_source_code: str
        :keyword commit_id:
        :paramtype commit_id: str
        :keyword code_review_link:
        :paramtype code_review_link: str
        :keyword unit_tests_available:
        :paramtype unit_tests_available: bool
        :keyword is_compressed:
        :paramtype is_compressed: bool
        :keyword execution_environment: Possible values include: "ExeWorkerMachine",
         "DockerContainerWithoutNetwork", "DockerContainerWithNetwork", "HyperVWithoutNetwork",
         "HyperVWithNetwork".
        :paramtype execution_environment: str or ~flow.models.AetherExecutionEnvironment
        :keyword is_output_markup_enabled:
        :paramtype is_output_markup_enabled: bool
        :keyword docker_image_id:
        :paramtype docker_image_id: str
        :keyword docker_image_reference:
        :paramtype docker_image_reference: str
        :keyword docker_image_security_groups:
        :paramtype docker_image_security_groups: str
        :keyword extended_properties:
        :paramtype extended_properties: ~flow.models.AetherModuleExtendedProperties
        :keyword deployment_source: Possible values include: "Client", "AutoDeployment", "Vsts".
        :paramtype deployment_source: str or ~flow.models.AetherModuleDeploymentSource
        :keyword deployment_source_metadata:
        :paramtype deployment_source_metadata: str
        :keyword identifier_hash:
        :paramtype identifier_hash: str
        :keyword identifier_hash_v2:
        :paramtype identifier_hash_v2: str
        :keyword kv_tags: This is a dictionary.
        :paramtype kv_tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword created_by:
        :paramtype created_by: ~flow.models.AetherCreatedBy
        :keyword runconfig:
        :paramtype runconfig: str
        :keyword cloud_settings:
        :paramtype cloud_settings: ~flow.models.AetherCloudSettings
        :keyword category:
        :paramtype category: str
        :keyword step_type:
        :paramtype step_type: str
        :keyword stage:
        :paramtype stage: str
        :keyword upload_state: Possible values include: "Uploading", "Completed", "Canceled",
         "Failed".
        :paramtype upload_state: str or ~flow.models.AetherUploadState
        :keyword source_code_location:
        :paramtype source_code_location: str
        :keyword size_in_bytes:
        :paramtype size_in_bytes: long
        :keyword download_location:
        :paramtype download_location: str
        :keyword data_location:
        :paramtype data_location: ~flow.models.AetherDataLocation
        :keyword scripting_runtime_id:
        :paramtype scripting_runtime_id: str
        :keyword interface_documentation:
        :paramtype interface_documentation: ~flow.models.AetherEntityInterfaceDocumentation
        :keyword is_eyes_on:
        :paramtype is_eyes_on: bool
        :keyword compliance_cluster:
        :paramtype compliance_cluster: str
        :keyword is_deterministic:
        :paramtype is_deterministic: bool
        :keyword information_url:
        :paramtype information_url: str
        :keyword is_experiment_id_in_parameters:
        :paramtype is_experiment_id_in_parameters: bool
        :keyword interface_string:
        :paramtype interface_string: str
        :keyword default_parameters: This is a dictionary.
        :paramtype default_parameters: dict[str, str]
        :keyword structured_interface:
        :paramtype structured_interface: ~flow.models.AetherStructuredInterface
        :keyword family_id:
        :paramtype family_id: str
        :keyword name:
        :paramtype name: str
        :keyword hash:
        :paramtype hash: str
        :keyword description:
        :paramtype description: str
        :keyword version:
        :paramtype version: str
        :keyword sequence_number_in_family:
        :paramtype sequence_number_in_family: int
        :keyword owner:
        :paramtype owner: str
        :keyword azure_tenant_id:
        :paramtype azure_tenant_id: str
        :keyword azure_user_id:
        :paramtype azure_user_id: str
        :keyword collaborators:
        :paramtype collaborators: list[str]
        :keyword id:
        :paramtype id: str
        :keyword workspace_id:
        :paramtype workspace_id: str
        :keyword etag:
        :paramtype etag: str
        :keyword tags: A set of tags.
        :paramtype tags: list[str]
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.AetherEntityStatus
        """
        super(AetherModuleEntity, self).__init__(**kwargs)
        # Every field is an optional keyword argument; unset fields default to None.
        self.last_updated_by = kwargs.get('last_updated_by', None)
        self.display_name = kwargs.get('display_name', None)
        self.module_execution_type = kwargs.get('module_execution_type', None)
        self.module_type = kwargs.get('module_type', None)
        self.module_type_version = kwargs.get('module_type_version', None)
        self.resource_requirements = kwargs.get('resource_requirements', None)
        self.machine_cluster = kwargs.get('machine_cluster', None)
        self.default_compliance_cluster = kwargs.get('default_compliance_cluster', None)
        self.repository_type = kwargs.get('repository_type', None)
        self.relative_path_to_source_code = kwargs.get('relative_path_to_source_code', None)
        self.commit_id = kwargs.get('commit_id', None)
        self.code_review_link = kwargs.get('code_review_link', None)
        self.unit_tests_available = kwargs.get('unit_tests_available', None)
        self.is_compressed = kwargs.get('is_compressed', None)
        self.execution_environment = kwargs.get('execution_environment', None)
        self.is_output_markup_enabled = kwargs.get('is_output_markup_enabled', None)
        self.docker_image_id = kwargs.get('docker_image_id', None)
        self.docker_image_reference = kwargs.get('docker_image_reference', None)
        self.docker_image_security_groups = kwargs.get('docker_image_security_groups', None)
        self.extended_properties = kwargs.get('extended_properties', None)
        self.deployment_source = kwargs.get('deployment_source', None)
        self.deployment_source_metadata = kwargs.get('deployment_source_metadata', None)
        self.identifier_hash = kwargs.get('identifier_hash', None)
        self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)
        self.kv_tags = kwargs.get('kv_tags', None)
        self.properties = kwargs.get('properties', None)
        self.created_by = kwargs.get('created_by', None)
        self.runconfig = kwargs.get('runconfig', None)
        self.cloud_settings = kwargs.get('cloud_settings', None)
        self.category = kwargs.get('category', None)
        self.step_type = kwargs.get('step_type', None)
        self.stage = kwargs.get('stage', None)
        self.upload_state = kwargs.get('upload_state', None)
        self.source_code_location = kwargs.get('source_code_location', None)
        self.size_in_bytes = kwargs.get('size_in_bytes', None)
        self.download_location = kwargs.get('download_location', None)
        self.data_location = kwargs.get('data_location', None)
        self.scripting_runtime_id = kwargs.get('scripting_runtime_id', None)
        self.interface_documentation = kwargs.get('interface_documentation', None)
        self.is_eyes_on = kwargs.get('is_eyes_on', None)
        self.compliance_cluster = kwargs.get('compliance_cluster', None)
        self.is_deterministic = kwargs.get('is_deterministic', None)
        self.information_url = kwargs.get('information_url', None)
        self.is_experiment_id_in_parameters = kwargs.get('is_experiment_id_in_parameters', None)
        self.interface_string = kwargs.get('interface_string', None)
        self.default_parameters = kwargs.get('default_parameters', None)
        self.structured_interface = kwargs.get('structured_interface', None)
        self.family_id = kwargs.get('family_id', None)
        self.name = kwargs.get('name', None)
        self.hash = kwargs.get('hash', None)
        self.description = kwargs.get('description', None)
        self.version = kwargs.get('version', None)
        self.sequence_number_in_family = kwargs.get('sequence_number_in_family', None)
        self.owner = kwargs.get('owner', None)
        self.azure_tenant_id = kwargs.get('azure_tenant_id', None)
        self.azure_user_id = kwargs.get('azure_user_id', None)
        self.collaborators = kwargs.get('collaborators', None)
        self.id = kwargs.get('id', None)
        self.workspace_id = kwargs.get('workspace_id', None)
        self.etag = kwargs.get('etag', None)
        self.tags = kwargs.get('tags', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)
        self.entity_status = kwargs.get('entity_status', None)


# AutoRest-generated msrest model; manual edits are lost on regeneration (see file header).
class AetherModuleExtendedProperties(msrest.serialization.Model):
    """AetherModuleExtendedProperties.

    :ivar auto_deployed_artifact:
    :vartype auto_deployed_artifact: ~flow.models.AetherBuildArtifactInfo
    :ivar script_needs_approval:
    :vartype script_needs_approval: bool
    """

    _attribute_map = {
        'auto_deployed_artifact': {'key': 'autoDeployedArtifact', 'type': 'AetherBuildArtifactInfo'},
        'script_needs_approval': {'key': 'scriptNeedsApproval', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword auto_deployed_artifact:
        :paramtype auto_deployed_artifact: ~flow.models.AetherBuildArtifactInfo
        :keyword script_needs_approval:
        :paramtype script_needs_approval: bool
        """
        super(AetherModuleExtendedProperties, self).__init__(**kwargs)
        self.auto_deployed_artifact = kwargs.get('auto_deployed_artifact', None)
        self.script_needs_approval = kwargs.get('script_needs_approval', None)


# AutoRest-generated msrest model; manual edits are lost on regeneration (see file header).
class AetherNCrossValidations(msrest.serialization.Model):
    """AetherNCrossValidations.

    :ivar mode: Possible values include: "Auto", "Custom".
    :vartype mode: str or ~flow.models.AetherNCrossValidationMode
    :ivar value:
    :vartype value: int
    """

    _attribute_map = {
        'mode': {'key': 'mode', 'type': 'str'},
        'value': {'key': 'value', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword mode: Possible values include: "Auto", "Custom".
        :paramtype mode: str or ~flow.models.AetherNCrossValidationMode
        :keyword value:
        :paramtype value: int
        """
        super(AetherNCrossValidations, self).__init__(**kwargs)
        self.mode = kwargs.get('mode', None)
        self.value = kwargs.get('value', None)


# AutoRest-generated msrest model; manual edits are lost on regeneration (see file header).
class AetherOutputSetting(msrest.serialization.Model):
    """AetherOutputSetting.
    :ivar name:
    :vartype name: str
    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar data_store_name_parameter_assignment:
    :vartype data_store_name_parameter_assignment: ~flow.models.AetherParameterAssignment
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
    :ivar data_store_mode_parameter_assignment:
    :vartype data_store_mode_parameter_assignment: ~flow.models.AetherParameterAssignment
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar path_on_compute_parameter_assignment:
    :vartype path_on_compute_parameter_assignment: ~flow.models.AetherParameterAssignment
    :ivar overwrite:
    :vartype overwrite: bool
    :ivar data_reference_name:
    :vartype data_reference_name: str
    :ivar web_service_port:
    :vartype web_service_port: str
    :ivar dataset_registration:
    :vartype dataset_registration: ~flow.models.AetherDatasetRegistration
    :ivar dataset_output_options:
    :vartype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
    :ivar asset_output_settings:
    :vartype asset_output_settings: ~flow.models.AetherAssetOutputSettings
    :ivar parameter_name:
    :vartype parameter_name: str
    :ivar asset_output_settings_parameter_name:
    :vartype asset_output_settings_parameter_name: str
    """

    # Maps each Python attribute to its REST wire name and msrest serialization type.
    # NOTE: several wire keys here are PascalCase (e.g. 'DataStoreNameParameterAssignment',
    # 'AssetOutputSettings') — preserved exactly as the service contract defines them.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'data_store_name_parameter_assignment': {'key': 'DataStoreNameParameterAssignment', 'type': 'AetherParameterAssignment'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'data_store_mode_parameter_assignment': {'key': 'DataStoreModeParameterAssignment', 'type': 'AetherParameterAssignment'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'path_on_compute_parameter_assignment': {'key': 'PathOnComputeParameterAssignment', 'type': 'AetherParameterAssignment'},
        'overwrite': {'key': 'overwrite', 'type': 'bool'},
        'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
        'web_service_port': {'key': 'webServicePort', 'type': 'str'},
        'dataset_registration': {'key': 'datasetRegistration', 'type': 'AetherDatasetRegistration'},
        'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'AetherDatasetOutputOptions'},
        'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AetherAssetOutputSettings'},
        'parameter_name': {'key': 'parameterName', 'type': 'str'},
        'asset_output_settings_parameter_name': {'key': 'AssetOutputSettingsParameterName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword data_store_name_parameter_assignment:
        :paramtype data_store_name_parameter_assignment: ~flow.models.AetherParameterAssignment
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
        :keyword data_store_mode_parameter_assignment:
        :paramtype data_store_mode_parameter_assignment: ~flow.models.AetherParameterAssignment
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword path_on_compute_parameter_assignment:
        :paramtype path_on_compute_parameter_assignment: ~flow.models.AetherParameterAssignment
        :keyword overwrite:
        :paramtype overwrite: bool
        :keyword data_reference_name:
        :paramtype data_reference_name: str
        :keyword web_service_port:
        :paramtype web_service_port: str
        :keyword dataset_registration:
        :paramtype dataset_registration: ~flow.models.AetherDatasetRegistration
        :keyword dataset_output_options:
        :paramtype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
        :keyword asset_output_settings:
        :paramtype asset_output_settings: ~flow.models.AetherAssetOutputSettings
        :keyword parameter_name:
        :paramtype parameter_name: str
        :keyword asset_output_settings_parameter_name:
        :paramtype asset_output_settings_parameter_name: str
        """
        super(AetherOutputSetting, self).__init__(**kwargs)
        # Every field is an optional keyword argument; unset fields default to None.
        self.name = kwargs.get('name', None)
        self.data_store_name = kwargs.get('data_store_name', None)
        self.data_store_name_parameter_assignment = kwargs.get('data_store_name_parameter_assignment', None)
        self.data_store_mode = kwargs.get('data_store_mode', None)
        self.data_store_mode_parameter_assignment = kwargs.get('data_store_mode_parameter_assignment', None)
        self.path_on_compute = kwargs.get('path_on_compute', None)
        self.path_on_compute_parameter_assignment = kwargs.get('path_on_compute_parameter_assignment', None)
        self.overwrite = kwargs.get('overwrite', None)
        self.data_reference_name = kwargs.get('data_reference_name', None)
        self.web_service_port = kwargs.get('web_service_port', None)
        self.dataset_registration = kwargs.get('dataset_registration', None)
        self.dataset_output_options = kwargs.get('dataset_output_options', None)
        self.asset_output_settings = kwargs.get('asset_output_settings', None)
        self.parameter_name = kwargs.get('parameter_name', None)
        self.asset_output_settings_parameter_name = kwargs.get('asset_output_settings_parameter_name', None)


# AutoRest-generated msrest model; manual edits are lost on regeneration (see file header).
class AetherParallelForControlFlowInfo(msrest.serialization.Model):
    """AetherParallelForControlFlowInfo.

    :ivar parallel_for_items_input:
    :vartype parallel_for_items_input: ~flow.models.AetherParameterAssignment
    """

    _attribute_map = {
        'parallel_for_items_input': {'key': 'parallelForItemsInput', 'type': 'AetherParameterAssignment'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword parallel_for_items_input:
        :paramtype parallel_for_items_input: ~flow.models.AetherParameterAssignment
        """
        super(AetherParallelForControlFlowInfo, self).__init__(**kwargs)
        self.parallel_for_items_input = kwargs.get('parallel_for_items_input', None)


# AutoRest-generated msrest model; manual edits are lost on regeneration (see file header).
class AetherParameterAssignment(msrest.serialization.Model):
    """AetherParameterAssignment.

    :ivar value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
     "Input", "DataPath", "DataSetDefinition".
:vartype value_type: str or ~flow.models.AetherParameterValueType :ivar assignments_to_concatenate: :vartype assignments_to_concatenate: list[~flow.models.AetherParameterAssignment] :ivar data_path_assignment: :vartype data_path_assignment: ~flow.models.AetherLegacyDataPath :ivar data_set_definition_value_assignment: :vartype data_set_definition_value_assignment: ~flow.models.AetherDataSetDefinitionValue :ivar name: :vartype name: str :ivar value: :vartype value: str """ _attribute_map = { 'value_type': {'key': 'valueType', 'type': 'str'}, 'assignments_to_concatenate': {'key': 'assignmentsToConcatenate', 'type': '[AetherParameterAssignment]'}, 'data_path_assignment': {'key': 'dataPathAssignment', 'type': 'AetherLegacyDataPath'}, 'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': 'AetherDataSetDefinitionValue'}, 'name': {'key': 'name', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate", "Input", "DataPath", "DataSetDefinition". 
:paramtype value_type: str or ~flow.models.AetherParameterValueType :keyword assignments_to_concatenate: :paramtype assignments_to_concatenate: list[~flow.models.AetherParameterAssignment] :keyword data_path_assignment: :paramtype data_path_assignment: ~flow.models.AetherLegacyDataPath :keyword data_set_definition_value_assignment: :paramtype data_set_definition_value_assignment: ~flow.models.AetherDataSetDefinitionValue :keyword name: :paramtype name: str :keyword value: :paramtype value: str """ super(AetherParameterAssignment, self).__init__(**kwargs) self.value_type = kwargs.get('value_type', None) self.assignments_to_concatenate = kwargs.get('assignments_to_concatenate', None) self.data_path_assignment = kwargs.get('data_path_assignment', None) self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None) self.name = kwargs.get('name', None) self.value = kwargs.get('value', None) class AetherPhillyHdfsReference(msrest.serialization.Model): """AetherPhillyHdfsReference. :ivar cluster: :vartype cluster: str :ivar vc: :vartype vc: str :ivar relative_path: :vartype relative_path: str """ _attribute_map = { 'cluster': {'key': 'cluster', 'type': 'str'}, 'vc': {'key': 'vc', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword cluster: :paramtype cluster: str :keyword vc: :paramtype vc: str :keyword relative_path: :paramtype relative_path: str """ super(AetherPhillyHdfsReference, self).__init__(**kwargs) self.cluster = kwargs.get('cluster', None) self.vc = kwargs.get('vc', None) self.relative_path = kwargs.get('relative_path', None) class AetherPortInfo(msrest.serialization.Model): """AetherPortInfo. 
:ivar node_id: :vartype node_id: str :ivar port_name: :vartype port_name: str :ivar graph_port_name: :vartype graph_port_name: str :ivar is_parameter: :vartype is_parameter: bool :ivar web_service_port: :vartype web_service_port: str """ _attribute_map = { 'node_id': {'key': 'nodeId', 'type': 'str'}, 'port_name': {'key': 'portName', 'type': 'str'}, 'graph_port_name': {'key': 'graphPortName', 'type': 'str'}, 'is_parameter': {'key': 'isParameter', 'type': 'bool'}, 'web_service_port': {'key': 'webServicePort', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword node_id: :paramtype node_id: str :keyword port_name: :paramtype port_name: str :keyword graph_port_name: :paramtype graph_port_name: str :keyword is_parameter: :paramtype is_parameter: bool :keyword web_service_port: :paramtype web_service_port: str """ super(AetherPortInfo, self).__init__(**kwargs) self.node_id = kwargs.get('node_id', None) self.port_name = kwargs.get('port_name', None) self.graph_port_name = kwargs.get('graph_port_name', None) self.is_parameter = kwargs.get('is_parameter', None) self.web_service_port = kwargs.get('web_service_port', None) class AetherPriorityConfig(msrest.serialization.Model): """AetherPriorityConfig. 
:ivar job_priority: :vartype job_priority: int :ivar is_preemptible: :vartype is_preemptible: bool :ivar node_count_set: :vartype node_count_set: list[int] :ivar scale_interval: :vartype scale_interval: int """ _attribute_map = { 'job_priority': {'key': 'jobPriority', 'type': 'int'}, 'is_preemptible': {'key': 'isPreemptible', 'type': 'bool'}, 'node_count_set': {'key': 'nodeCountSet', 'type': '[int]'}, 'scale_interval': {'key': 'scaleInterval', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword job_priority: :paramtype job_priority: int :keyword is_preemptible: :paramtype is_preemptible: bool :keyword node_count_set: :paramtype node_count_set: list[int] :keyword scale_interval: :paramtype scale_interval: int """ super(AetherPriorityConfig, self).__init__(**kwargs) self.job_priority = kwargs.get('job_priority', None) self.is_preemptible = kwargs.get('is_preemptible', None) self.node_count_set = kwargs.get('node_count_set', None) self.scale_interval = kwargs.get('scale_interval', None) class AetherPriorityConfiguration(msrest.serialization.Model): """AetherPriorityConfiguration. :ivar cloud_priority: :vartype cloud_priority: int :ivar string_type_priority: :vartype string_type_priority: str """ _attribute_map = { 'cloud_priority': {'key': 'cloudPriority', 'type': 'int'}, 'string_type_priority': {'key': 'stringTypePriority', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword cloud_priority: :paramtype cloud_priority: int :keyword string_type_priority: :paramtype string_type_priority: str """ super(AetherPriorityConfiguration, self).__init__(**kwargs) self.cloud_priority = kwargs.get('cloud_priority', None) self.string_type_priority = kwargs.get('string_type_priority', None) class AetherRegisteredDataSetReference(msrest.serialization.Model): """AetherRegisteredDataSetReference. 
:ivar id: :vartype id: str :ivar name: :vartype name: str :ivar version: :vartype version: str """ _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword id: :paramtype id: str :keyword name: :paramtype name: str :keyword version: :paramtype version: str """ super(AetherRegisteredDataSetReference, self).__init__(**kwargs) self.id = kwargs.get('id', None) self.name = kwargs.get('name', None) self.version = kwargs.get('version', None) class AetherRemoteDockerComputeInfo(msrest.serialization.Model): """AetherRemoteDockerComputeInfo. :ivar address: :vartype address: str :ivar username: :vartype username: str :ivar password: :vartype password: str :ivar private_key: :vartype private_key: str """ _attribute_map = { 'address': {'key': 'address', 'type': 'str'}, 'username': {'key': 'username', 'type': 'str'}, 'password': {'key': 'password', 'type': 'str'}, 'private_key': {'key': 'privateKey', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword address: :paramtype address: str :keyword username: :paramtype username: str :keyword password: :paramtype password: str :keyword private_key: :paramtype private_key: str """ super(AetherRemoteDockerComputeInfo, self).__init__(**kwargs) self.address = kwargs.get('address', None) self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) self.private_key = kwargs.get('private_key', None) class AetherResourceAssignment(msrest.serialization.Model): """AetherResourceAssignment. :ivar attributes: Dictionary of :code:`<AetherResourceAttributeAssignment>`. :vartype attributes: dict[str, ~flow.models.AetherResourceAttributeAssignment] """ _attribute_map = { 'attributes': {'key': 'attributes', 'type': '{AetherResourceAttributeAssignment}'}, } def __init__( self, **kwargs ): """ :keyword attributes: Dictionary of :code:`<AetherResourceAttributeAssignment>`. 
:paramtype attributes: dict[str, ~flow.models.AetherResourceAttributeAssignment] """ super(AetherResourceAssignment, self).__init__(**kwargs) self.attributes = kwargs.get('attributes', None) class AetherResourceAttributeAssignment(msrest.serialization.Model): """AetherResourceAttributeAssignment. :ivar attribute: :vartype attribute: ~flow.models.AetherResourceAttributeDefinition :ivar operator: Possible values include: "Equal", "Contain", "GreaterOrEqual". :vartype operator: str or ~flow.models.AetherResourceOperator :ivar value: :vartype value: str """ _attribute_map = { 'attribute': {'key': 'attribute', 'type': 'AetherResourceAttributeDefinition'}, 'operator': {'key': 'operator', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword attribute: :paramtype attribute: ~flow.models.AetherResourceAttributeDefinition :keyword operator: Possible values include: "Equal", "Contain", "GreaterOrEqual". :paramtype operator: str or ~flow.models.AetherResourceOperator :keyword value: :paramtype value: str """ super(AetherResourceAttributeAssignment, self).__init__(**kwargs) self.attribute = kwargs.get('attribute', None) self.operator = kwargs.get('operator', None) self.value = kwargs.get('value', None) class AetherResourceAttributeDefinition(msrest.serialization.Model): """AetherResourceAttributeDefinition. :ivar name: :vartype name: str :ivar type: Possible values include: "String", "Double". 
:vartype type: str or ~flow.models.AetherResourceValueType :ivar units: :vartype units: str :ivar allowed_operators: :vartype allowed_operators: list[str or ~flow.models.AetherResourceOperator] """ _validation = { 'allowed_operators': {'unique': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'units': {'key': 'units', 'type': 'str'}, 'allowed_operators': {'key': 'allowedOperators', 'type': '[str]'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword type: Possible values include: "String", "Double". :paramtype type: str or ~flow.models.AetherResourceValueType :keyword units: :paramtype units: str :keyword allowed_operators: :paramtype allowed_operators: list[str or ~flow.models.AetherResourceOperator] """ super(AetherResourceAttributeDefinition, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.type = kwargs.get('type', None) self.units = kwargs.get('units', None) self.allowed_operators = kwargs.get('allowed_operators', None) class AetherResourceConfig(msrest.serialization.Model): """AetherResourceConfig. :ivar gpu_count: :vartype gpu_count: int :ivar cpu_count: :vartype cpu_count: int :ivar memory_request_in_gb: :vartype memory_request_in_gb: int """ _attribute_map = { 'gpu_count': {'key': 'gpuCount', 'type': 'int'}, 'cpu_count': {'key': 'cpuCount', 'type': 'int'}, 'memory_request_in_gb': {'key': 'memoryRequestInGB', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword gpu_count: :paramtype gpu_count: int :keyword cpu_count: :paramtype cpu_count: int :keyword memory_request_in_gb: :paramtype memory_request_in_gb: int """ super(AetherResourceConfig, self).__init__(**kwargs) self.gpu_count = kwargs.get('gpu_count', None) self.cpu_count = kwargs.get('cpu_count', None) self.memory_request_in_gb = kwargs.get('memory_request_in_gb', None) class AetherResourceConfiguration(msrest.serialization.Model): """AetherResourceConfiguration. 
:ivar instance_count: :vartype instance_count: int :ivar instance_type: :vartype instance_type: str :ivar properties: Dictionary of :code:`<any>`. :vartype properties: dict[str, any] :ivar locations: :vartype locations: list[str] :ivar instance_priority: :vartype instance_priority: str :ivar quota_enforcement_resource_id: :vartype quota_enforcement_resource_id: str """ _attribute_map = { 'instance_count': {'key': 'instanceCount', 'type': 'int'}, 'instance_type': {'key': 'instanceType', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{object}'}, 'locations': {'key': 'locations', 'type': '[str]'}, 'instance_priority': {'key': 'instancePriority', 'type': 'str'}, 'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword instance_count: :paramtype instance_count: int :keyword instance_type: :paramtype instance_type: str :keyword properties: Dictionary of :code:`<any>`. :paramtype properties: dict[str, any] :keyword locations: :paramtype locations: list[str] :keyword instance_priority: :paramtype instance_priority: str :keyword quota_enforcement_resource_id: :paramtype quota_enforcement_resource_id: str """ super(AetherResourceConfiguration, self).__init__(**kwargs) self.instance_count = kwargs.get('instance_count', None) self.instance_type = kwargs.get('instance_type', None) self.properties = kwargs.get('properties', None) self.locations = kwargs.get('locations', None) self.instance_priority = kwargs.get('instance_priority', None) self.quota_enforcement_resource_id = kwargs.get('quota_enforcement_resource_id', None) class AetherResourceModel(msrest.serialization.Model): """AetherResourceModel. 
:ivar resources: :vartype resources: list[~flow.models.AetherResourceAssignment] """ _attribute_map = { 'resources': {'key': 'resources', 'type': '[AetherResourceAssignment]'}, } def __init__( self, **kwargs ): """ :keyword resources: :paramtype resources: list[~flow.models.AetherResourceAssignment] """ super(AetherResourceModel, self).__init__(**kwargs) self.resources = kwargs.get('resources', None) class AetherResourcesSetting(msrest.serialization.Model): """AetherResourcesSetting. :ivar instance_size: :vartype instance_size: str :ivar spark_version: :vartype spark_version: str """ _attribute_map = { 'instance_size': {'key': 'instanceSize', 'type': 'str'}, 'spark_version': {'key': 'sparkVersion', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword instance_size: :paramtype instance_size: str :keyword spark_version: :paramtype spark_version: str """ super(AetherResourcesSetting, self).__init__(**kwargs) self.instance_size = kwargs.get('instance_size', None) self.spark_version = kwargs.get('spark_version', None) class AetherSavedDataSetReference(msrest.serialization.Model): """AetherSavedDataSetReference. :ivar id: :vartype id: str """ _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword id: :paramtype id: str """ super(AetherSavedDataSetReference, self).__init__(**kwargs) self.id = kwargs.get('id', None) class AetherScopeCloudConfiguration(msrest.serialization.Model): """AetherScopeCloudConfiguration. :ivar input_path_suffixes: This is a dictionary. :vartype input_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment] :ivar output_path_suffixes: This is a dictionary. 
:vartype output_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment] :ivar user_alias: :vartype user_alias: str :ivar tokens: :vartype tokens: int :ivar auto_token: :vartype auto_token: int :ivar vcp: :vartype vcp: float """ _attribute_map = { 'input_path_suffixes': {'key': 'inputPathSuffixes', 'type': '{AetherArgumentAssignment}'}, 'output_path_suffixes': {'key': 'outputPathSuffixes', 'type': '{AetherArgumentAssignment}'}, 'user_alias': {'key': 'userAlias', 'type': 'str'}, 'tokens': {'key': 'tokens', 'type': 'int'}, 'auto_token': {'key': 'autoToken', 'type': 'int'}, 'vcp': {'key': 'vcp', 'type': 'float'}, } def __init__( self, **kwargs ): """ :keyword input_path_suffixes: This is a dictionary. :paramtype input_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment] :keyword output_path_suffixes: This is a dictionary. :paramtype output_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment] :keyword user_alias: :paramtype user_alias: str :keyword tokens: :paramtype tokens: int :keyword auto_token: :paramtype auto_token: int :keyword vcp: :paramtype vcp: float """ super(AetherScopeCloudConfiguration, self).__init__(**kwargs) self.input_path_suffixes = kwargs.get('input_path_suffixes', None) self.output_path_suffixes = kwargs.get('output_path_suffixes', None) self.user_alias = kwargs.get('user_alias', None) self.tokens = kwargs.get('tokens', None) self.auto_token = kwargs.get('auto_token', None) self.vcp = kwargs.get('vcp', None) class AetherSeasonality(msrest.serialization.Model): """AetherSeasonality. :ivar mode: Possible values include: "Auto", "Custom". :vartype mode: str or ~flow.models.AetherSeasonalityMode :ivar value: :vartype value: int """ _attribute_map = { 'mode': {'key': 'mode', 'type': 'str'}, 'value': {'key': 'value', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword mode: Possible values include: "Auto", "Custom". 
:paramtype mode: str or ~flow.models.AetherSeasonalityMode :keyword value: :paramtype value: int """ super(AetherSeasonality, self).__init__(**kwargs) self.mode = kwargs.get('mode', None) self.value = kwargs.get('value', None) class AetherSqlDataPath(msrest.serialization.Model): """AetherSqlDataPath. :ivar sql_table_name: :vartype sql_table_name: str :ivar sql_query: :vartype sql_query: str :ivar sql_stored_procedure_name: :vartype sql_stored_procedure_name: str :ivar sql_stored_procedure_params: :vartype sql_stored_procedure_params: list[~flow.models.AetherStoredProcedureParameter] """ _attribute_map = { 'sql_table_name': {'key': 'sqlTableName', 'type': 'str'}, 'sql_query': {'key': 'sqlQuery', 'type': 'str'}, 'sql_stored_procedure_name': {'key': 'sqlStoredProcedureName', 'type': 'str'}, 'sql_stored_procedure_params': {'key': 'sqlStoredProcedureParams', 'type': '[AetherStoredProcedureParameter]'}, } def __init__( self, **kwargs ): """ :keyword sql_table_name: :paramtype sql_table_name: str :keyword sql_query: :paramtype sql_query: str :keyword sql_stored_procedure_name: :paramtype sql_stored_procedure_name: str :keyword sql_stored_procedure_params: :paramtype sql_stored_procedure_params: list[~flow.models.AetherStoredProcedureParameter] """ super(AetherSqlDataPath, self).__init__(**kwargs) self.sql_table_name = kwargs.get('sql_table_name', None) self.sql_query = kwargs.get('sql_query', None) self.sql_stored_procedure_name = kwargs.get('sql_stored_procedure_name', None) self.sql_stored_procedure_params = kwargs.get('sql_stored_procedure_params', None) class AetherStackEnsembleSettings(msrest.serialization.Model): """AetherStackEnsembleSettings. :ivar stack_meta_learner_type: Possible values include: "None", "LogisticRegression", "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV", "LightGBMRegressor", "LinearRegression". 
:vartype stack_meta_learner_type: str or ~flow.models.AetherStackMetaLearnerType :ivar stack_meta_learner_train_percentage: :vartype stack_meta_learner_train_percentage: float :ivar stack_meta_learner_k_wargs: Anything. :vartype stack_meta_learner_k_wargs: any """ _attribute_map = { 'stack_meta_learner_type': {'key': 'stackMetaLearnerType', 'type': 'str'}, 'stack_meta_learner_train_percentage': {'key': 'stackMetaLearnerTrainPercentage', 'type': 'float'}, 'stack_meta_learner_k_wargs': {'key': 'stackMetaLearnerKWargs', 'type': 'object'}, } def __init__( self, **kwargs ): """ :keyword stack_meta_learner_type: Possible values include: "None", "LogisticRegression", "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV", "LightGBMRegressor", "LinearRegression". :paramtype stack_meta_learner_type: str or ~flow.models.AetherStackMetaLearnerType :keyword stack_meta_learner_train_percentage: :paramtype stack_meta_learner_train_percentage: float :keyword stack_meta_learner_k_wargs: Anything. :paramtype stack_meta_learner_k_wargs: any """ super(AetherStackEnsembleSettings, self).__init__(**kwargs) self.stack_meta_learner_type = kwargs.get('stack_meta_learner_type', None) self.stack_meta_learner_train_percentage = kwargs.get('stack_meta_learner_train_percentage', None) self.stack_meta_learner_k_wargs = kwargs.get('stack_meta_learner_k_wargs', None) class AetherStoredProcedureParameter(msrest.serialization.Model): """AetherStoredProcedureParameter. :ivar name: :vartype name: str :ivar value: :vartype value: str :ivar type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date". 
:vartype type: str or ~flow.models.AetherStoredProcedureParameterType """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword value: :paramtype value: str :keyword type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date". :paramtype type: str or ~flow.models.AetherStoredProcedureParameterType """ super(AetherStoredProcedureParameter, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.value = kwargs.get('value', None) self.type = kwargs.get('type', None) class AetherStructuredInterface(msrest.serialization.Model): """AetherStructuredInterface. :ivar command_line_pattern: :vartype command_line_pattern: str :ivar inputs: :vartype inputs: list[~flow.models.AetherStructuredInterfaceInput] :ivar outputs: :vartype outputs: list[~flow.models.AetherStructuredInterfaceOutput] :ivar control_outputs: :vartype control_outputs: list[~flow.models.AetherControlOutput] :ivar parameters: :vartype parameters: list[~flow.models.AetherStructuredInterfaceParameter] :ivar metadata_parameters: :vartype metadata_parameters: list[~flow.models.AetherStructuredInterfaceParameter] :ivar arguments: :vartype arguments: list[~flow.models.AetherArgumentAssignment] """ _attribute_map = { 'command_line_pattern': {'key': 'commandLinePattern', 'type': 'str'}, 'inputs': {'key': 'inputs', 'type': '[AetherStructuredInterfaceInput]'}, 'outputs': {'key': 'outputs', 'type': '[AetherStructuredInterfaceOutput]'}, 'control_outputs': {'key': 'controlOutputs', 'type': '[AetherControlOutput]'}, 'parameters': {'key': 'parameters', 'type': '[AetherStructuredInterfaceParameter]'}, 'metadata_parameters': {'key': 'metadataParameters', 'type': '[AetherStructuredInterfaceParameter]'}, 'arguments': {'key': 'arguments', 'type': '[AetherArgumentAssignment]'}, } def __init__( self, **kwargs ): """ :keyword 
command_line_pattern: :paramtype command_line_pattern: str :keyword inputs: :paramtype inputs: list[~flow.models.AetherStructuredInterfaceInput] :keyword outputs: :paramtype outputs: list[~flow.models.AetherStructuredInterfaceOutput] :keyword control_outputs: :paramtype control_outputs: list[~flow.models.AetherControlOutput] :keyword parameters: :paramtype parameters: list[~flow.models.AetherStructuredInterfaceParameter] :keyword metadata_parameters: :paramtype metadata_parameters: list[~flow.models.AetherStructuredInterfaceParameter] :keyword arguments: :paramtype arguments: list[~flow.models.AetherArgumentAssignment] """ super(AetherStructuredInterface, self).__init__(**kwargs) self.command_line_pattern = kwargs.get('command_line_pattern', None) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.control_outputs = kwargs.get('control_outputs', None) self.parameters = kwargs.get('parameters', None) self.metadata_parameters = kwargs.get('metadata_parameters', None) self.arguments = kwargs.get('arguments', None) class AetherStructuredInterfaceInput(msrest.serialization.Model): """AetherStructuredInterfaceInput. :ivar name: :vartype name: str :ivar label: :vartype label: str :ivar data_type_ids_list: :vartype data_type_ids_list: list[str] :ivar is_optional: :vartype is_optional: bool :ivar description: :vartype description: str :ivar skip_processing: :vartype skip_processing: bool :ivar is_resource: :vartype is_resource: bool :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". 
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode :ivar path_on_compute: :vartype path_on_compute: str :ivar overwrite: :vartype overwrite: bool :ivar data_reference_name: :vartype data_reference_name: str :ivar dataset_types: :vartype dataset_types: list[str or ~flow.models.AetherDatasetType] :ivar additional_transformations: :vartype additional_transformations: str """ _validation = { 'dataset_types': {'unique': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'label': {'key': 'label', 'type': 'str'}, 'data_type_ids_list': {'key': 'dataTypeIdsList', 'type': '[str]'}, 'is_optional': {'key': 'isOptional', 'type': 'bool'}, 'description': {'key': 'description', 'type': 'str'}, 'skip_processing': {'key': 'skipProcessing', 'type': 'bool'}, 'is_resource': {'key': 'isResource', 'type': 'bool'}, 'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'}, 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, 'overwrite': {'key': 'overwrite', 'type': 'bool'}, 'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'}, 'dataset_types': {'key': 'datasetTypes', 'type': '[str]'}, 'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword label: :paramtype label: str :keyword data_type_ids_list: :paramtype data_type_ids_list: list[str] :keyword is_optional: :paramtype is_optional: bool :keyword description: :paramtype description: str :keyword skip_processing: :paramtype skip_processing: bool :keyword is_resource: :paramtype is_resource: bool :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". 
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode :keyword path_on_compute: :paramtype path_on_compute: str :keyword overwrite: :paramtype overwrite: bool :keyword data_reference_name: :paramtype data_reference_name: str :keyword dataset_types: :paramtype dataset_types: list[str or ~flow.models.AetherDatasetType] :keyword additional_transformations: :paramtype additional_transformations: str """ super(AetherStructuredInterfaceInput, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.label = kwargs.get('label', None) self.data_type_ids_list = kwargs.get('data_type_ids_list', None) self.is_optional = kwargs.get('is_optional', None) self.description = kwargs.get('description', None) self.skip_processing = kwargs.get('skip_processing', None) self.is_resource = kwargs.get('is_resource', None) self.data_store_mode = kwargs.get('data_store_mode', None) self.path_on_compute = kwargs.get('path_on_compute', None) self.overwrite = kwargs.get('overwrite', None) self.data_reference_name = kwargs.get('data_reference_name', None) self.dataset_types = kwargs.get('dataset_types', None) self.additional_transformations = kwargs.get('additional_transformations', None) class AetherStructuredInterfaceOutput(msrest.serialization.Model): """AetherStructuredInterfaceOutput. :ivar name: :vartype name: str :ivar label: :vartype label: str :ivar data_type_id: :vartype data_type_id: str :ivar pass_through_data_type_input_name: :vartype pass_through_data_type_input_name: str :ivar description: :vartype description: str :ivar skip_processing: :vartype skip_processing: bool :ivar is_artifact: :vartype is_artifact: bool :ivar data_store_name: :vartype data_store_name: str :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". 
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode :ivar path_on_compute: :vartype path_on_compute: str :ivar overwrite: :vartype overwrite: bool :ivar data_reference_name: :vartype data_reference_name: str :ivar training_output: :vartype training_output: ~flow.models.AetherTrainingOutput :ivar dataset_output: :vartype dataset_output: ~flow.models.AetherDatasetOutput :ivar asset_output_settings: :vartype asset_output_settings: ~flow.models.AetherAssetOutputSettings :ivar early_available: :vartype early_available: bool """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'label': {'key': 'label', 'type': 'str'}, 'data_type_id': {'key': 'dataTypeId', 'type': 'str'}, 'pass_through_data_type_input_name': {'key': 'passThroughDataTypeInputName', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'skip_processing': {'key': 'skipProcessing', 'type': 'bool'}, 'is_artifact': {'key': 'isArtifact', 'type': 'bool'}, 'data_store_name': {'key': 'dataStoreName', 'type': 'str'}, 'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'}, 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, 'overwrite': {'key': 'overwrite', 'type': 'bool'}, 'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'}, 'training_output': {'key': 'trainingOutput', 'type': 'AetherTrainingOutput'}, 'dataset_output': {'key': 'datasetOutput', 'type': 'AetherDatasetOutput'}, 'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AetherAssetOutputSettings'}, 'early_available': {'key': 'earlyAvailable', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword label: :paramtype label: str :keyword data_type_id: :paramtype data_type_id: str :keyword pass_through_data_type_input_name: :paramtype pass_through_data_type_input_name: str :keyword description: :paramtype description: str :keyword skip_processing: :paramtype skip_processing: bool :keyword is_artifact: :paramtype is_artifact: bool :keyword 
data_store_name: :paramtype data_store_name: str :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". :paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode :keyword path_on_compute: :paramtype path_on_compute: str :keyword overwrite: :paramtype overwrite: bool :keyword data_reference_name: :paramtype data_reference_name: str :keyword training_output: :paramtype training_output: ~flow.models.AetherTrainingOutput :keyword dataset_output: :paramtype dataset_output: ~flow.models.AetherDatasetOutput :keyword asset_output_settings: :paramtype asset_output_settings: ~flow.models.AetherAssetOutputSettings :keyword early_available: :paramtype early_available: bool """ super(AetherStructuredInterfaceOutput, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.label = kwargs.get('label', None) self.data_type_id = kwargs.get('data_type_id', None) self.pass_through_data_type_input_name = kwargs.get('pass_through_data_type_input_name', None) self.description = kwargs.get('description', None) self.skip_processing = kwargs.get('skip_processing', None) self.is_artifact = kwargs.get('is_artifact', None) self.data_store_name = kwargs.get('data_store_name', None) self.data_store_mode = kwargs.get('data_store_mode', None) self.path_on_compute = kwargs.get('path_on_compute', None) self.overwrite = kwargs.get('overwrite', None) self.data_reference_name = kwargs.get('data_reference_name', None) self.training_output = kwargs.get('training_output', None) self.dataset_output = kwargs.get('dataset_output', None) self.asset_output_settings = kwargs.get('asset_output_settings', None) self.early_available = kwargs.get('early_available', None) class AetherStructuredInterfaceParameter(msrest.serialization.Model): """AetherStructuredInterfaceParameter. :ivar name: :vartype name: str :ivar label: :vartype label: str :ivar parameter_type: Possible values include: "Int", "Double", "Bool", "String", "Undefined". 
:vartype parameter_type: str or ~flow.models.AetherParameterType :ivar is_optional: :vartype is_optional: bool :ivar default_value: :vartype default_value: str :ivar lower_bound: :vartype lower_bound: str :ivar upper_bound: :vartype upper_bound: str :ivar enum_values: :vartype enum_values: list[str] :ivar enum_values_to_argument_strings: This is a dictionary. :vartype enum_values_to_argument_strings: dict[str, str] :ivar description: :vartype description: str :ivar set_environment_variable: :vartype set_environment_variable: bool :ivar environment_variable_override: :vartype environment_variable_override: str :ivar enabled_by_parameter_name: :vartype enabled_by_parameter_name: str :ivar enabled_by_parameter_values: :vartype enabled_by_parameter_values: list[str] :ivar ui_hint: :vartype ui_hint: ~flow.models.AetherUIParameterHint :ivar group_names: :vartype group_names: list[str] :ivar argument_name: :vartype argument_name: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'label': {'key': 'label', 'type': 'str'}, 'parameter_type': {'key': 'parameterType', 'type': 'str'}, 'is_optional': {'key': 'isOptional', 'type': 'bool'}, 'default_value': {'key': 'defaultValue', 'type': 'str'}, 'lower_bound': {'key': 'lowerBound', 'type': 'str'}, 'upper_bound': {'key': 'upperBound', 'type': 'str'}, 'enum_values': {'key': 'enumValues', 'type': '[str]'}, 'enum_values_to_argument_strings': {'key': 'enumValuesToArgumentStrings', 'type': '{str}'}, 'description': {'key': 'description', 'type': 'str'}, 'set_environment_variable': {'key': 'setEnvironmentVariable', 'type': 'bool'}, 'environment_variable_override': {'key': 'environmentVariableOverride', 'type': 'str'}, 'enabled_by_parameter_name': {'key': 'enabledByParameterName', 'type': 'str'}, 'enabled_by_parameter_values': {'key': 'enabledByParameterValues', 'type': '[str]'}, 'ui_hint': {'key': 'uiHint', 'type': 'AetherUIParameterHint'}, 'group_names': {'key': 'groupNames', 'type': '[str]'}, 'argument_name': {'key': 
'argumentName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword label: :paramtype label: str :keyword parameter_type: Possible values include: "Int", "Double", "Bool", "String", "Undefined". :paramtype parameter_type: str or ~flow.models.AetherParameterType :keyword is_optional: :paramtype is_optional: bool :keyword default_value: :paramtype default_value: str :keyword lower_bound: :paramtype lower_bound: str :keyword upper_bound: :paramtype upper_bound: str :keyword enum_values: :paramtype enum_values: list[str] :keyword enum_values_to_argument_strings: This is a dictionary. :paramtype enum_values_to_argument_strings: dict[str, str] :keyword description: :paramtype description: str :keyword set_environment_variable: :paramtype set_environment_variable: bool :keyword environment_variable_override: :paramtype environment_variable_override: str :keyword enabled_by_parameter_name: :paramtype enabled_by_parameter_name: str :keyword enabled_by_parameter_values: :paramtype enabled_by_parameter_values: list[str] :keyword ui_hint: :paramtype ui_hint: ~flow.models.AetherUIParameterHint :keyword group_names: :paramtype group_names: list[str] :keyword argument_name: :paramtype argument_name: str """ super(AetherStructuredInterfaceParameter, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.label = kwargs.get('label', None) self.parameter_type = kwargs.get('parameter_type', None) self.is_optional = kwargs.get('is_optional', None) self.default_value = kwargs.get('default_value', None) self.lower_bound = kwargs.get('lower_bound', None) self.upper_bound = kwargs.get('upper_bound', None) self.enum_values = kwargs.get('enum_values', None) self.enum_values_to_argument_strings = kwargs.get('enum_values_to_argument_strings', None) self.description = kwargs.get('description', None) self.set_environment_variable = kwargs.get('set_environment_variable', None) self.environment_variable_override = 
kwargs.get('environment_variable_override', None) self.enabled_by_parameter_name = kwargs.get('enabled_by_parameter_name', None) self.enabled_by_parameter_values = kwargs.get('enabled_by_parameter_values', None) self.ui_hint = kwargs.get('ui_hint', None) self.group_names = kwargs.get('group_names', None) self.argument_name = kwargs.get('argument_name', None) class AetherSubGraphConfiguration(msrest.serialization.Model): """AetherSubGraphConfiguration. :ivar graph_id: :vartype graph_id: str :ivar graph_draft_id: :vartype graph_draft_id: str :ivar default_compute_internal: :vartype default_compute_internal: ~flow.models.AetherComputeSetting :ivar default_datastore_internal: :vartype default_datastore_internal: ~flow.models.AetherDatastoreSetting :ivar default_cloud_priority: :vartype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting :ivar user_alias: :vartype user_alias: str :ivar is_dynamic: :vartype is_dynamic: bool """ _attribute_map = { 'graph_id': {'key': 'graphId', 'type': 'str'}, 'graph_draft_id': {'key': 'graphDraftId', 'type': 'str'}, 'default_compute_internal': {'key': 'defaultComputeInternal', 'type': 'AetherComputeSetting'}, 'default_datastore_internal': {'key': 'defaultDatastoreInternal', 'type': 'AetherDatastoreSetting'}, 'default_cloud_priority': {'key': 'DefaultCloudPriority', 'type': 'AetherCloudPrioritySetting'}, 'user_alias': {'key': 'UserAlias', 'type': 'str'}, 'is_dynamic': {'key': 'IsDynamic', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword graph_id: :paramtype graph_id: str :keyword graph_draft_id: :paramtype graph_draft_id: str :keyword default_compute_internal: :paramtype default_compute_internal: ~flow.models.AetherComputeSetting :keyword default_datastore_internal: :paramtype default_datastore_internal: ~flow.models.AetherDatastoreSetting :keyword default_cloud_priority: :paramtype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting :keyword user_alias: :paramtype user_alias: str :keyword 
is_dynamic: :paramtype is_dynamic: bool """ super(AetherSubGraphConfiguration, self).__init__(**kwargs) self.graph_id = kwargs.get('graph_id', None) self.graph_draft_id = kwargs.get('graph_draft_id', None) self.default_compute_internal = kwargs.get('default_compute_internal', None) self.default_datastore_internal = kwargs.get('default_datastore_internal', None) self.default_cloud_priority = kwargs.get('default_cloud_priority', None) self.user_alias = kwargs.get('user_alias', None) self.is_dynamic = kwargs.get('is_dynamic', False) class AetherSweepEarlyTerminationPolicy(msrest.serialization.Model): """AetherSweepEarlyTerminationPolicy. :ivar policy_type: Possible values include: "Bandit", "MedianStopping", "TruncationSelection". :vartype policy_type: str or ~flow.models.AetherEarlyTerminationPolicyType :ivar evaluation_interval: :vartype evaluation_interval: int :ivar delay_evaluation: :vartype delay_evaluation: int :ivar slack_factor: :vartype slack_factor: float :ivar slack_amount: :vartype slack_amount: float :ivar truncation_percentage: :vartype truncation_percentage: int """ _attribute_map = { 'policy_type': {'key': 'policyType', 'type': 'str'}, 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, 'slack_factor': {'key': 'slackFactor', 'type': 'float'}, 'slack_amount': {'key': 'slackAmount', 'type': 'float'}, 'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword policy_type: Possible values include: "Bandit", "MedianStopping", "TruncationSelection". 
:paramtype policy_type: str or ~flow.models.AetherEarlyTerminationPolicyType :keyword evaluation_interval: :paramtype evaluation_interval: int :keyword delay_evaluation: :paramtype delay_evaluation: int :keyword slack_factor: :paramtype slack_factor: float :keyword slack_amount: :paramtype slack_amount: float :keyword truncation_percentage: :paramtype truncation_percentage: int """ super(AetherSweepEarlyTerminationPolicy, self).__init__(**kwargs) self.policy_type = kwargs.get('policy_type', None) self.evaluation_interval = kwargs.get('evaluation_interval', None) self.delay_evaluation = kwargs.get('delay_evaluation', None) self.slack_factor = kwargs.get('slack_factor', None) self.slack_amount = kwargs.get('slack_amount', None) self.truncation_percentage = kwargs.get('truncation_percentage', None) class AetherSweepSettings(msrest.serialization.Model): """AetherSweepSettings. :ivar limits: :vartype limits: ~flow.models.AetherSweepSettingsLimits :ivar search_space: :vartype search_space: list[dict[str, str]] :ivar sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian". :vartype sampling_algorithm: str or ~flow.models.AetherSamplingAlgorithmType :ivar early_termination: :vartype early_termination: ~flow.models.AetherSweepEarlyTerminationPolicy """ _attribute_map = { 'limits': {'key': 'limits', 'type': 'AetherSweepSettingsLimits'}, 'search_space': {'key': 'searchSpace', 'type': '[{str}]'}, 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'}, 'early_termination': {'key': 'earlyTermination', 'type': 'AetherSweepEarlyTerminationPolicy'}, } def __init__( self, **kwargs ): """ :keyword limits: :paramtype limits: ~flow.models.AetherSweepSettingsLimits :keyword search_space: :paramtype search_space: list[dict[str, str]] :keyword sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian". 
:paramtype sampling_algorithm: str or ~flow.models.AetherSamplingAlgorithmType :keyword early_termination: :paramtype early_termination: ~flow.models.AetherSweepEarlyTerminationPolicy """ super(AetherSweepSettings, self).__init__(**kwargs) self.limits = kwargs.get('limits', None) self.search_space = kwargs.get('search_space', None) self.sampling_algorithm = kwargs.get('sampling_algorithm', None) self.early_termination = kwargs.get('early_termination', None) class AetherSweepSettingsLimits(msrest.serialization.Model): """AetherSweepSettingsLimits. :ivar max_total_trials: :vartype max_total_trials: int :ivar max_concurrent_trials: :vartype max_concurrent_trials: int """ _attribute_map = { 'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'}, 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword max_total_trials: :paramtype max_total_trials: int :keyword max_concurrent_trials: :paramtype max_concurrent_trials: int """ super(AetherSweepSettingsLimits, self).__init__(**kwargs) self.max_total_trials = kwargs.get('max_total_trials', None) self.max_concurrent_trials = kwargs.get('max_concurrent_trials', None) class AetherTargetLags(msrest.serialization.Model): """AetherTargetLags. :ivar mode: Possible values include: "Auto", "Custom". :vartype mode: str or ~flow.models.AetherTargetLagsMode :ivar values: :vartype values: list[int] """ _attribute_map = { 'mode': {'key': 'mode', 'type': 'str'}, 'values': {'key': 'values', 'type': '[int]'}, } def __init__( self, **kwargs ): """ :keyword mode: Possible values include: "Auto", "Custom". :paramtype mode: str or ~flow.models.AetherTargetLagsMode :keyword values: :paramtype values: list[int] """ super(AetherTargetLags, self).__init__(**kwargs) self.mode = kwargs.get('mode', None) self.values = kwargs.get('values', None) class AetherTargetRollingWindowSize(msrest.serialization.Model): """AetherTargetRollingWindowSize. 
:ivar mode: Possible values include: "Auto", "Custom". :vartype mode: str or ~flow.models.AetherTargetRollingWindowSizeMode :ivar value: :vartype value: int """ _attribute_map = { 'mode': {'key': 'mode', 'type': 'str'}, 'value': {'key': 'value', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword mode: Possible values include: "Auto", "Custom". :paramtype mode: str or ~flow.models.AetherTargetRollingWindowSizeMode :keyword value: :paramtype value: int """ super(AetherTargetRollingWindowSize, self).__init__(**kwargs) self.mode = kwargs.get('mode', None) self.value = kwargs.get('value', None) class AetherTargetSelectorConfiguration(msrest.serialization.Model): """AetherTargetSelectorConfiguration. :ivar low_priority_vm_tolerant: :vartype low_priority_vm_tolerant: bool :ivar cluster_block_list: :vartype cluster_block_list: list[str] :ivar compute_type: :vartype compute_type: str :ivar instance_type: :vartype instance_type: list[str] :ivar instance_types: :vartype instance_types: list[str] :ivar my_resource_only: :vartype my_resource_only: bool :ivar plan_id: :vartype plan_id: str :ivar plan_region_id: :vartype plan_region_id: str :ivar region: :vartype region: list[str] :ivar regions: :vartype regions: list[str] :ivar vc_block_list: :vartype vc_block_list: list[str] """ _attribute_map = { 'low_priority_vm_tolerant': {'key': 'lowPriorityVMTolerant', 'type': 'bool'}, 'cluster_block_list': {'key': 'clusterBlockList', 'type': '[str]'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'instance_type': {'key': 'instanceType', 'type': '[str]'}, 'instance_types': {'key': 'instanceTypes', 'type': '[str]'}, 'my_resource_only': {'key': 'myResourceOnly', 'type': 'bool'}, 'plan_id': {'key': 'planId', 'type': 'str'}, 'plan_region_id': {'key': 'planRegionId', 'type': 'str'}, 'region': {'key': 'region', 'type': '[str]'}, 'regions': {'key': 'regions', 'type': '[str]'}, 'vc_block_list': {'key': 'vcBlockList', 'type': '[str]'}, } def __init__( self, **kwargs ): """ 
:keyword low_priority_vm_tolerant: :paramtype low_priority_vm_tolerant: bool :keyword cluster_block_list: :paramtype cluster_block_list: list[str] :keyword compute_type: :paramtype compute_type: str :keyword instance_type: :paramtype instance_type: list[str] :keyword instance_types: :paramtype instance_types: list[str] :keyword my_resource_only: :paramtype my_resource_only: bool :keyword plan_id: :paramtype plan_id: str :keyword plan_region_id: :paramtype plan_region_id: str :keyword region: :paramtype region: list[str] :keyword regions: :paramtype regions: list[str] :keyword vc_block_list: :paramtype vc_block_list: list[str] """ super(AetherTargetSelectorConfiguration, self).__init__(**kwargs) self.low_priority_vm_tolerant = kwargs.get('low_priority_vm_tolerant', None) self.cluster_block_list = kwargs.get('cluster_block_list', None) self.compute_type = kwargs.get('compute_type', None) self.instance_type = kwargs.get('instance_type', None) self.instance_types = kwargs.get('instance_types', None) self.my_resource_only = kwargs.get('my_resource_only', None) self.plan_id = kwargs.get('plan_id', None) self.plan_region_id = kwargs.get('plan_region_id', None) self.region = kwargs.get('region', None) self.regions = kwargs.get('regions', None) self.vc_block_list = kwargs.get('vc_block_list', None) class AetherTestDataSettings(msrest.serialization.Model): """AetherTestDataSettings. :ivar test_data_size: :vartype test_data_size: float """ _attribute_map = { 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, } def __init__( self, **kwargs ): """ :keyword test_data_size: :paramtype test_data_size: float """ super(AetherTestDataSettings, self).__init__(**kwargs) self.test_data_size = kwargs.get('test_data_size', None) class AetherTorchDistributedConfiguration(msrest.serialization.Model): """AetherTorchDistributedConfiguration. 
:ivar process_count_per_node: :vartype process_count_per_node: int """ _attribute_map = { 'process_count_per_node': {'key': 'processCountPerNode', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword process_count_per_node: :paramtype process_count_per_node: int """ super(AetherTorchDistributedConfiguration, self).__init__(**kwargs) self.process_count_per_node = kwargs.get('process_count_per_node', None) class AetherTrainingOutput(msrest.serialization.Model): """AetherTrainingOutput. :ivar training_output_type: Possible values include: "Metrics", "Model". :vartype training_output_type: str or ~flow.models.AetherTrainingOutputType :ivar iteration: :vartype iteration: int :ivar metric: :vartype metric: str :ivar model_file: :vartype model_file: str """ _attribute_map = { 'training_output_type': {'key': 'trainingOutputType', 'type': 'str'}, 'iteration': {'key': 'iteration', 'type': 'int'}, 'metric': {'key': 'metric', 'type': 'str'}, 'model_file': {'key': 'modelFile', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword training_output_type: Possible values include: "Metrics", "Model". :paramtype training_output_type: str or ~flow.models.AetherTrainingOutputType :keyword iteration: :paramtype iteration: int :keyword metric: :paramtype metric: str :keyword model_file: :paramtype model_file: str """ super(AetherTrainingOutput, self).__init__(**kwargs) self.training_output_type = kwargs.get('training_output_type', None) self.iteration = kwargs.get('iteration', None) self.metric = kwargs.get('metric', None) self.model_file = kwargs.get('model_file', None) class AetherTrainingSettings(msrest.serialization.Model): """AetherTrainingSettings. 
:ivar block_list_models: :vartype block_list_models: list[str] :ivar allow_list_models: :vartype allow_list_models: list[str] :ivar enable_dnn_training: :vartype enable_dnn_training: bool :ivar enable_onnx_compatible_models: :vartype enable_onnx_compatible_models: bool :ivar stack_ensemble_settings: :vartype stack_ensemble_settings: ~flow.models.AetherStackEnsembleSettings :ivar enable_stack_ensemble: :vartype enable_stack_ensemble: bool :ivar enable_vote_ensemble: :vartype enable_vote_ensemble: bool :ivar ensemble_model_download_timeout: :vartype ensemble_model_download_timeout: str :ivar enable_model_explainability: :vartype enable_model_explainability: bool :ivar training_mode: Possible values include: "Distributed", "NonDistributed", "Auto". :vartype training_mode: str or ~flow.models.AetherTabularTrainingMode """ _attribute_map = { 'block_list_models': {'key': 'blockListModels', 'type': '[str]'}, 'allow_list_models': {'key': 'allowListModels', 'type': '[str]'}, 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'AetherStackEnsembleSettings'}, 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'str'}, 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, 'training_mode': {'key': 'trainingMode', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword block_list_models: :paramtype block_list_models: list[str] :keyword allow_list_models: :paramtype allow_list_models: list[str] :keyword enable_dnn_training: :paramtype enable_dnn_training: bool :keyword enable_onnx_compatible_models: :paramtype enable_onnx_compatible_models: bool :keyword stack_ensemble_settings: :paramtype 
stack_ensemble_settings: ~flow.models.AetherStackEnsembleSettings :keyword enable_stack_ensemble: :paramtype enable_stack_ensemble: bool :keyword enable_vote_ensemble: :paramtype enable_vote_ensemble: bool :keyword ensemble_model_download_timeout: :paramtype ensemble_model_download_timeout: str :keyword enable_model_explainability: :paramtype enable_model_explainability: bool :keyword training_mode: Possible values include: "Distributed", "NonDistributed", "Auto". :paramtype training_mode: str or ~flow.models.AetherTabularTrainingMode """ super(AetherTrainingSettings, self).__init__(**kwargs) self.block_list_models = kwargs.get('block_list_models', None) self.allow_list_models = kwargs.get('allow_list_models', None) self.enable_dnn_training = kwargs.get('enable_dnn_training', None) self.enable_onnx_compatible_models = kwargs.get('enable_onnx_compatible_models', None) self.stack_ensemble_settings = kwargs.get('stack_ensemble_settings', None) self.enable_stack_ensemble = kwargs.get('enable_stack_ensemble', None) self.enable_vote_ensemble = kwargs.get('enable_vote_ensemble', None) self.ensemble_model_download_timeout = kwargs.get('ensemble_model_download_timeout', None) self.enable_model_explainability = kwargs.get('enable_model_explainability', None) self.training_mode = kwargs.get('training_mode', None) class AetherUIAzureOpenAIDeploymentNameSelector(msrest.serialization.Model): """AetherUIAzureOpenAIDeploymentNameSelector. 
:ivar capabilities: :vartype capabilities: ~flow.models.AetherUIAzureOpenAIModelCapabilities """ _attribute_map = { 'capabilities': {'key': 'Capabilities', 'type': 'AetherUIAzureOpenAIModelCapabilities'}, } def __init__( self, **kwargs ): """ :keyword capabilities: :paramtype capabilities: ~flow.models.AetherUIAzureOpenAIModelCapabilities """ super(AetherUIAzureOpenAIDeploymentNameSelector, self).__init__(**kwargs) self.capabilities = kwargs.get('capabilities', None) class AetherUIAzureOpenAIModelCapabilities(msrest.serialization.Model): """AetherUIAzureOpenAIModelCapabilities. :ivar completion: :vartype completion: bool :ivar chat_completion: :vartype chat_completion: bool :ivar embeddings: :vartype embeddings: bool """ _attribute_map = { 'completion': {'key': 'Completion', 'type': 'bool'}, 'chat_completion': {'key': 'ChatCompletion', 'type': 'bool'}, 'embeddings': {'key': 'Embeddings', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword completion: :paramtype completion: bool :keyword chat_completion: :paramtype chat_completion: bool :keyword embeddings: :paramtype embeddings: bool """ super(AetherUIAzureOpenAIModelCapabilities, self).__init__(**kwargs) self.completion = kwargs.get('completion', None) self.chat_completion = kwargs.get('chat_completion', None) self.embeddings = kwargs.get('embeddings', None) class AetherUIColumnPicker(msrest.serialization.Model): """AetherUIColumnPicker. 
:ivar column_picker_for: :vartype column_picker_for: str :ivar column_selection_categories: :vartype column_selection_categories: list[str] :ivar single_column_selection: :vartype single_column_selection: bool """ _attribute_map = { 'column_picker_for': {'key': 'columnPickerFor', 'type': 'str'}, 'column_selection_categories': {'key': 'columnSelectionCategories', 'type': '[str]'}, 'single_column_selection': {'key': 'singleColumnSelection', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword column_picker_for: :paramtype column_picker_for: str :keyword column_selection_categories: :paramtype column_selection_categories: list[str] :keyword single_column_selection: :paramtype single_column_selection: bool """ super(AetherUIColumnPicker, self).__init__(**kwargs) self.column_picker_for = kwargs.get('column_picker_for', None) self.column_selection_categories = kwargs.get('column_selection_categories', None) self.single_column_selection = kwargs.get('single_column_selection', None) class AetherUIJsonEditor(msrest.serialization.Model): """AetherUIJsonEditor. :ivar json_schema: :vartype json_schema: str """ _attribute_map = { 'json_schema': {'key': 'jsonSchema', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword json_schema: :paramtype json_schema: str """ super(AetherUIJsonEditor, self).__init__(**kwargs) self.json_schema = kwargs.get('json_schema', None) class AetherUIParameterHint(msrest.serialization.Model): """AetherUIParameterHint. :ivar ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection". 
:vartype ui_widget_type: str or ~flow.models.AetherUIWidgetTypeEnum :ivar column_picker: :vartype column_picker: ~flow.models.AetherUIColumnPicker :ivar ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql". :vartype ui_script_language: str or ~flow.models.AetherUIScriptLanguageEnum :ivar json_editor: :vartype json_editor: ~flow.models.AetherUIJsonEditor :ivar prompt_flow_connection_selector: :vartype prompt_flow_connection_selector: ~flow.models.AetherUIPromptFlowConnectionSelector :ivar azure_open_ai_deployment_name_selector: :vartype azure_open_ai_deployment_name_selector: ~flow.models.AetherUIAzureOpenAIDeploymentNameSelector :ivar ux_ignore: :vartype ux_ignore: bool :ivar anonymous: :vartype anonymous: bool """ _attribute_map = { 'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'}, 'column_picker': {'key': 'columnPicker', 'type': 'AetherUIColumnPicker'}, 'ui_script_language': {'key': 'uiScriptLanguage', 'type': 'str'}, 'json_editor': {'key': 'jsonEditor', 'type': 'AetherUIJsonEditor'}, 'prompt_flow_connection_selector': {'key': 'PromptFlowConnectionSelector', 'type': 'AetherUIPromptFlowConnectionSelector'}, 'azure_open_ai_deployment_name_selector': {'key': 'AzureOpenAIDeploymentNameSelector', 'type': 'AetherUIAzureOpenAIDeploymentNameSelector'}, 'ux_ignore': {'key': 'UxIgnore', 'type': 'bool'}, 'anonymous': {'key': 'Anonymous', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection". 
:paramtype ui_widget_type: str or ~flow.models.AetherUIWidgetTypeEnum :keyword column_picker: :paramtype column_picker: ~flow.models.AetherUIColumnPicker :keyword ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql". :paramtype ui_script_language: str or ~flow.models.AetherUIScriptLanguageEnum :keyword json_editor: :paramtype json_editor: ~flow.models.AetherUIJsonEditor :keyword prompt_flow_connection_selector: :paramtype prompt_flow_connection_selector: ~flow.models.AetherUIPromptFlowConnectionSelector :keyword azure_open_ai_deployment_name_selector: :paramtype azure_open_ai_deployment_name_selector: ~flow.models.AetherUIAzureOpenAIDeploymentNameSelector :keyword ux_ignore: :paramtype ux_ignore: bool :keyword anonymous: :paramtype anonymous: bool """ super(AetherUIParameterHint, self).__init__(**kwargs) self.ui_widget_type = kwargs.get('ui_widget_type', None) self.column_picker = kwargs.get('column_picker', None) self.ui_script_language = kwargs.get('ui_script_language', None) self.json_editor = kwargs.get('json_editor', None) self.prompt_flow_connection_selector = kwargs.get('prompt_flow_connection_selector', None) self.azure_open_ai_deployment_name_selector = kwargs.get('azure_open_ai_deployment_name_selector', None) self.ux_ignore = kwargs.get('ux_ignore', None) self.anonymous = kwargs.get('anonymous', None) class AetherUIPromptFlowConnectionSelector(msrest.serialization.Model): """AetherUIPromptFlowConnectionSelector. 
    :ivar prompt_flow_connection_type:
    :vartype prompt_flow_connection_type: str
    """

    _attribute_map = {
        'prompt_flow_connection_type': {'key': 'PromptFlowConnectionType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword prompt_flow_connection_type:
        :paramtype prompt_flow_connection_type: str
        """
        super(AetherUIPromptFlowConnectionSelector, self).__init__(**kwargs)
        self.prompt_flow_connection_type = kwargs.get('prompt_flow_connection_type', None)


# Generated msrest wire model: validation-data settings (cross-validation spec,
# validation split size, CV split columns, validation type). Every attribute
# defaults to None when the keyword is not supplied.
class AetherValidationDataSettings(msrest.serialization.Model):
    """AetherValidationDataSettings.

    :ivar n_cross_validations:
    :vartype n_cross_validations: ~flow.models.AetherNCrossValidations
    :ivar validation_data_size:
    :vartype validation_data_size: float
    :ivar cv_split_column_names:
    :vartype cv_split_column_names: list[str]
    :ivar validation_type:
    :vartype validation_type: str
    """

    # Maps Python attribute name -> REST payload key + msrest type tag.
    _attribute_map = {
        'n_cross_validations': {'key': 'nCrossValidations', 'type': 'AetherNCrossValidations'},
        'validation_data_size': {'key': 'validationDataSize', 'type': 'float'},
        'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'},
        'validation_type': {'key': 'validationType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword n_cross_validations:
        :paramtype n_cross_validations: ~flow.models.AetherNCrossValidations
        :keyword validation_data_size:
        :paramtype validation_data_size: float
        :keyword cv_split_column_names:
        :paramtype cv_split_column_names: list[str]
        :keyword validation_type:
        :paramtype validation_type: str
        """
        super(AetherValidationDataSettings, self).__init__(**kwargs)
        self.n_cross_validations = kwargs.get('n_cross_validations', None)
        self.validation_data_size = kwargs.get('validation_data_size', None)
        self.cv_split_column_names = kwargs.get('cv_split_column_names', None)
        self.validation_type = kwargs.get('validation_type', None)


# Generated msrest wire model: a VSO build artifact (build reference + download URL).
class AetherVsoBuildArtifactInfo(msrest.serialization.Model):
    """AetherVsoBuildArtifactInfo.

    :ivar build_info:
    :vartype build_info: ~flow.models.AetherVsoBuildInfo
    :ivar download_url:
    :vartype download_url: str
    """

    _attribute_map = {
        'build_info': {'key': 'buildInfo', 'type': 'AetherVsoBuildInfo'},
        'download_url': {'key': 'downloadUrl', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword build_info:
        :paramtype build_info: ~flow.models.AetherVsoBuildInfo
        :keyword download_url:
        :paramtype download_url: str
        """
        super(AetherVsoBuildArtifactInfo, self).__init__(**kwargs)
        self.build_info = kwargs.get('build_info', None)
        self.download_url = kwargs.get('download_url', None)


# Generated msrest wire model: identifies a VSO build definition
# (account + project + numeric definition id).
class AetherVsoBuildDefinitionInfo(msrest.serialization.Model):
    """AetherVsoBuildDefinitionInfo.

    :ivar account_name:
    :vartype account_name: str
    :ivar project_id:
    :vartype project_id: str
    :ivar build_definition_id:
    :vartype build_definition_id: int
    """

    _attribute_map = {
        'account_name': {'key': 'accountName', 'type': 'str'},
        'project_id': {'key': 'projectId', 'type': 'str'},
        'build_definition_id': {'key': 'buildDefinitionId', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword account_name:
        :paramtype account_name: str
        :keyword project_id:
        :paramtype project_id: str
        :keyword build_definition_id:
        :paramtype build_definition_id: int
        """
        super(AetherVsoBuildDefinitionInfo, self).__init__(**kwargs)
        self.account_name = kwargs.get('account_name', None)
        self.project_id = kwargs.get('project_id', None)
        self.build_definition_id = kwargs.get('build_definition_id', None)


# Generated msrest wire model: a single VSO build (definition + build id).
class AetherVsoBuildInfo(msrest.serialization.Model):
    """AetherVsoBuildInfo.

    :ivar definition_info:
    :vartype definition_info: ~flow.models.AetherVsoBuildDefinitionInfo
    :ivar build_id:
    :vartype build_id: int
    """

    _attribute_map = {
        'definition_info': {'key': 'definitionInfo', 'type': 'AetherVsoBuildDefinitionInfo'},
        'build_id': {'key': 'buildId', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword definition_info:
        :paramtype definition_info: ~flow.models.AetherVsoBuildDefinitionInfo
        :keyword build_id:
        :paramtype build_id: int
        """
        super(AetherVsoBuildInfo, self).__init__(**kwargs)
        self.definition_info = kwargs.get('definition_info', None)
        self.build_id = kwargs.get('build_id', None)


# Generated msrest wire model: AEVA compute target configuration
# (target name, instance count/type, locality flags, free-form properties).
class AEVAComputeConfiguration(msrest.serialization.Model):
    """AEVAComputeConfiguration.

    :ivar target:
    :vartype target: str
    :ivar instance_count:
    :vartype instance_count: int
    :ivar is_local:
    :vartype is_local: bool
    :ivar location:
    :vartype location: str
    :ivar is_clusterless:
    :vartype is_clusterless: bool
    :ivar instance_type:
    :vartype instance_type: str
    :ivar properties: Dictionary of :code:`<any>`.
    :vartype properties: dict[str, any]
    :ivar is_preemptable:
    :vartype is_preemptable: bool
    """

    _attribute_map = {
        'target': {'key': 'target', 'type': 'str'},
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
        'is_local': {'key': 'isLocal', 'type': 'bool'},
        'location': {'key': 'location', 'type': 'str'},
        'is_clusterless': {'key': 'isClusterless', 'type': 'bool'},
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
        'is_preemptable': {'key': 'isPreemptable', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword target:
        :paramtype target: str
        :keyword instance_count:
        :paramtype instance_count: int
        :keyword is_local:
        :paramtype is_local: bool
        :keyword location:
        :paramtype location: str
        :keyword is_clusterless:
        :paramtype is_clusterless: bool
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword properties: Dictionary of :code:`<any>`.
        :paramtype properties: dict[str, any]
        :keyword is_preemptable:
        :paramtype is_preemptable: bool
        """
        super(AEVAComputeConfiguration, self).__init__(**kwargs)
        self.target = kwargs.get('target', None)
        self.instance_count = kwargs.get('instance_count', None)
        self.is_local = kwargs.get('is_local', None)
        self.location = kwargs.get('location', None)
        self.is_clusterless = kwargs.get('is_clusterless', None)
        self.instance_type = kwargs.get('instance_type', None)
        self.properties = kwargs.get('properties', None)
        self.is_preemptable = kwargs.get('is_preemptable', None)


# Generated msrest wire model: AEVA resource request
# (instance count/type, locations, priority, quota resource id).
class AEVAResourceConfiguration(msrest.serialization.Model):
    """AEVAResourceConfiguration.

    :ivar instance_count:
    :vartype instance_count: int
    :ivar instance_type:
    :vartype instance_type: str
    :ivar properties: Dictionary of :code:`<any>`.
    :vartype properties: dict[str, any]
    :ivar locations:
    :vartype locations: list[str]
    :ivar instance_priority:
    :vartype instance_priority: str
    :ivar quota_enforcement_resource_id:
    :vartype quota_enforcement_resource_id: str
    """

    _attribute_map = {
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
        'locations': {'key': 'locations', 'type': '[str]'},
        'instance_priority': {'key': 'instancePriority', 'type': 'str'},
        'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword instance_count:
        :paramtype instance_count: int
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword properties: Dictionary of :code:`<any>`.
        :paramtype properties: dict[str, any]
        :keyword locations:
        :paramtype locations: list[str]
        :keyword instance_priority:
        :paramtype instance_priority: str
        :keyword quota_enforcement_resource_id:
        :paramtype quota_enforcement_resource_id: str
        """
        super(AEVAResourceConfiguration, self).__init__(**kwargs)
        self.instance_count = kwargs.get('instance_count', None)
        self.instance_type = kwargs.get('instance_type', None)
        self.properties = kwargs.get('properties', None)
        self.locations = kwargs.get('locations', None)
        self.instance_priority = kwargs.get('instance_priority', None)
        self.quota_enforcement_resource_id = kwargs.get('quota_enforcement_resource_id', None)


# Generated msrest wire model: AI SuperComputer job configuration — instance
# selection, image, storage references, scale policy, SSH keys, SLA/priority
# and quota/group policy identifiers.
class AISuperComputerConfiguration(msrest.serialization.Model):
    """AISuperComputerConfiguration.

    :ivar instance_type:
    :vartype instance_type: str
    :ivar instance_types:
    :vartype instance_types: list[str]
    :ivar image_version:
    :vartype image_version: str
    :ivar location:
    :vartype location: str
    :ivar locations:
    :vartype locations: list[str]
    :ivar ai_super_computer_storage_data: Dictionary of
     :code:`<AISuperComputerStorageReferenceConfiguration>`.
    :vartype ai_super_computer_storage_data: dict[str,
     ~flow.models.AISuperComputerStorageReferenceConfiguration]
    :ivar interactive:
    :vartype interactive: bool
    :ivar scale_policy:
    :vartype scale_policy: ~flow.models.AISuperComputerScalePolicy
    :ivar virtual_cluster_arm_id:
    :vartype virtual_cluster_arm_id: str
    :ivar tensorboard_log_directory:
    :vartype tensorboard_log_directory: str
    :ivar ssh_public_key:
    :vartype ssh_public_key: str
    :ivar ssh_public_keys:
    :vartype ssh_public_keys: list[str]
    :ivar enable_azml_int:
    :vartype enable_azml_int: bool
    :ivar priority:
    :vartype priority: str
    :ivar sla_tier:
    :vartype sla_tier: str
    :ivar suspend_on_idle_time_hours:
    :vartype suspend_on_idle_time_hours: long
    :ivar user_alias:
    :vartype user_alias: str
    :ivar quota_enforcement_resource_id:
    :vartype quota_enforcement_resource_id: str
    :ivar model_compute_specification_id:
    :vartype model_compute_specification_id: str
    :ivar group_policy_name:
    :vartype group_policy_name: str
    """

    _attribute_map = {
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'instance_types': {'key': 'instanceTypes', 'type': '[str]'},
        'image_version': {'key': 'imageVersion', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'locations': {'key': 'locations', 'type': '[str]'},
        'ai_super_computer_storage_data': {'key': 'aiSuperComputerStorageData', 'type': '{AISuperComputerStorageReferenceConfiguration}'},
        'interactive': {'key': 'interactive', 'type': 'bool'},
        'scale_policy': {'key': 'scalePolicy', 'type': 'AISuperComputerScalePolicy'},
        'virtual_cluster_arm_id': {'key': 'virtualClusterArmId', 'type': 'str'},
        'tensorboard_log_directory': {'key': 'tensorboardLogDirectory', 'type': 'str'},
        'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
        'ssh_public_keys': {'key': 'sshPublicKeys', 'type': '[str]'},
        'enable_azml_int': {'key': 'enableAzmlInt', 'type': 'bool'},
        'priority': {'key': 'priority', 'type': 'str'},
        'sla_tier': {'key': 'slaTier', 'type': 'str'},
        'suspend_on_idle_time_hours': {'key': 'suspendOnIdleTimeHours', 'type': 'long'},
        'user_alias': {'key': 'userAlias', 'type': 'str'},
        'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
        'model_compute_specification_id': {'key': 'modelComputeSpecificationId', 'type': 'str'},
        'group_policy_name': {'key': 'groupPolicyName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword instance_types:
        :paramtype instance_types: list[str]
        :keyword image_version:
        :paramtype image_version: str
        :keyword location:
        :paramtype location: str
        :keyword locations:
        :paramtype locations: list[str]
        :keyword ai_super_computer_storage_data: Dictionary of
         :code:`<AISuperComputerStorageReferenceConfiguration>`.
        :paramtype ai_super_computer_storage_data: dict[str,
         ~flow.models.AISuperComputerStorageReferenceConfiguration]
        :keyword interactive:
        :paramtype interactive: bool
        :keyword scale_policy:
        :paramtype scale_policy: ~flow.models.AISuperComputerScalePolicy
        :keyword virtual_cluster_arm_id:
        :paramtype virtual_cluster_arm_id: str
        :keyword tensorboard_log_directory:
        :paramtype tensorboard_log_directory: str
        :keyword ssh_public_key:
        :paramtype ssh_public_key: str
        :keyword ssh_public_keys:
        :paramtype ssh_public_keys: list[str]
        :keyword enable_azml_int:
        :paramtype enable_azml_int: bool
        :keyword priority:
        :paramtype priority: str
        :keyword sla_tier:
        :paramtype sla_tier: str
        :keyword suspend_on_idle_time_hours:
        :paramtype suspend_on_idle_time_hours: long
        :keyword user_alias:
        :paramtype user_alias: str
        :keyword quota_enforcement_resource_id:
        :paramtype quota_enforcement_resource_id: str
        :keyword model_compute_specification_id:
        :paramtype model_compute_specification_id: str
        :keyword group_policy_name:
        :paramtype group_policy_name: str
        """
        super(AISuperComputerConfiguration, self).__init__(**kwargs)
        self.instance_type = kwargs.get('instance_type', None)
        self.instance_types = kwargs.get('instance_types', None)
        self.image_version = kwargs.get('image_version', None)
        self.location = kwargs.get('location', None)
        self.locations = kwargs.get('locations', None)
        self.ai_super_computer_storage_data = kwargs.get('ai_super_computer_storage_data', None)
        self.interactive = kwargs.get('interactive', None)
        self.scale_policy = kwargs.get('scale_policy', None)
        self.virtual_cluster_arm_id = kwargs.get('virtual_cluster_arm_id', None)
        self.tensorboard_log_directory = kwargs.get('tensorboard_log_directory', None)
        self.ssh_public_key = kwargs.get('ssh_public_key', None)
        self.ssh_public_keys = kwargs.get('ssh_public_keys', None)
        self.enable_azml_int = kwargs.get('enable_azml_int', None)
        self.priority = kwargs.get('priority', None)
        self.sla_tier = kwargs.get('sla_tier', None)
        self.suspend_on_idle_time_hours = kwargs.get('suspend_on_idle_time_hours', None)
        self.user_alias = kwargs.get('user_alias', None)
        self.quota_enforcement_resource_id = kwargs.get('quota_enforcement_resource_id', None)
        self.model_compute_specification_id = kwargs.get('model_compute_specification_id', None)
        self.group_policy_name = kwargs.get('group_policy_name', None)


# Generated msrest wire model: autoscale policy bounds/interval for the
# AI SuperComputer configuration above.
class AISuperComputerScalePolicy(msrest.serialization.Model):
    """AISuperComputerScalePolicy.

    :ivar auto_scale_instance_type_count_set:
    :vartype auto_scale_instance_type_count_set: list[int]
    :ivar auto_scale_interval_in_sec:
    :vartype auto_scale_interval_in_sec: int
    :ivar max_instance_type_count:
    :vartype max_instance_type_count: int
    :ivar min_instance_type_count:
    :vartype min_instance_type_count: int
    """

    _attribute_map = {
        'auto_scale_instance_type_count_set': {'key': 'autoScaleInstanceTypeCountSet', 'type': '[int]'},
        'auto_scale_interval_in_sec': {'key': 'autoScaleIntervalInSec', 'type': 'int'},
        'max_instance_type_count': {'key': 'maxInstanceTypeCount', 'type': 'int'},
        'min_instance_type_count': {'key': 'minInstanceTypeCount', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword auto_scale_instance_type_count_set:
        :paramtype auto_scale_instance_type_count_set: list[int]
        :keyword auto_scale_interval_in_sec:
        :paramtype auto_scale_interval_in_sec: int
        :keyword max_instance_type_count:
        :paramtype max_instance_type_count: int
        :keyword min_instance_type_count:
        :paramtype min_instance_type_count: int
        """
        super(AISuperComputerScalePolicy, self).__init__(**kwargs)
        self.auto_scale_instance_type_count_set = kwargs.get('auto_scale_instance_type_count_set', None)
        self.auto_scale_interval_in_sec = kwargs.get('auto_scale_interval_in_sec', None)
        self.max_instance_type_count = kwargs.get('max_instance_type_count', None)
        self.min_instance_type_count = kwargs.get('min_instance_type_count', None)


# Generated msrest wire model: storage reference (container + relative path)
# used as the value type of aiSuperComputerStorageData.
class AISuperComputerStorageReferenceConfiguration(msrest.serialization.Model):
    """AISuperComputerStorageReferenceConfiguration.

    :ivar container_name:
    :vartype container_name: str
    :ivar relative_path:
    :vartype relative_path: str
    """

    _attribute_map = {
        'container_name': {'key': 'containerName', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword container_name:
        :paramtype container_name: str
        :keyword relative_path:
        :paramtype relative_path: str
        """
        super(AISuperComputerStorageReferenceConfiguration, self).__init__(**kwargs)
        self.container_name = kwargs.get('container_name', None)
        self.relative_path = kwargs.get('relative_path', None)


# Generated msrest wire model: advanced AKS deployment settings
# (autoscaler, container resources, app-insights flag, scoring timeout, replicas).
class AKSAdvanceSettings(msrest.serialization.Model):
    """AKSAdvanceSettings.

    :ivar auto_scaler:
    :vartype auto_scaler: ~flow.models.AutoScaler
    :ivar container_resource_requirements:
    :vartype container_resource_requirements: ~flow.models.ContainerResourceRequirements
    :ivar app_insights_enabled:
    :vartype app_insights_enabled: bool
    :ivar scoring_timeout_ms:
    :vartype scoring_timeout_ms: int
    :ivar num_replicas:
    :vartype num_replicas: int
    """

    _attribute_map = {
        'auto_scaler': {'key': 'autoScaler', 'type': 'AutoScaler'},
        'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
        'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
        'num_replicas': {'key': 'numReplicas', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword auto_scaler:
        :paramtype auto_scaler: ~flow.models.AutoScaler
        :keyword container_resource_requirements:
        :paramtype container_resource_requirements: ~flow.models.ContainerResourceRequirements
        :keyword app_insights_enabled:
        :paramtype app_insights_enabled: bool
        :keyword scoring_timeout_ms:
        :paramtype scoring_timeout_ms: int
        :keyword num_replicas:
        :paramtype num_replicas: int
        """
        super(AKSAdvanceSettings, self).__init__(**kwargs)
        self.auto_scaler = kwargs.get('auto_scaler', None)
        self.container_resource_requirements = kwargs.get('container_resource_requirements', None)
        self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
        self.scoring_timeout_ms = kwargs.get('scoring_timeout_ms', None)
        self.num_replicas = kwargs.get('num_replicas', None)


# Generated msrest wire model: AKS replica status (desired/updated/available
# counts plus an optional error payload).
class AKSReplicaStatus(msrest.serialization.Model):
    """AKSReplicaStatus.

    :ivar desired_replicas:
    :vartype desired_replicas: int
    :ivar updated_replicas:
    :vartype updated_replicas: int
    :ivar available_replicas:
    :vartype available_replicas: int
    :ivar error:
    :vartype error: ~flow.models.ModelManagementErrorResponse
    """

    _attribute_map = {
        'desired_replicas': {'key': 'desiredReplicas', 'type': 'int'},
        'updated_replicas': {'key': 'updatedReplicas', 'type': 'int'},
        'available_replicas': {'key': 'availableReplicas', 'type': 'int'},
        'error': {'key': 'error', 'type': 'ModelManagementErrorResponse'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword desired_replicas:
        :paramtype desired_replicas: int
        :keyword updated_replicas:
        :paramtype updated_replicas: int
        :keyword available_replicas:
        :paramtype available_replicas: int
        :keyword error:
        :paramtype error: ~flow.models.ModelManagementErrorResponse
        """
        super(AKSReplicaStatus, self).__init__(**kwargs)
        self.desired_replicas = kwargs.get('desired_replicas', None)
        self.updated_replicas = kwargs.get('updated_replicas', None)
        self.available_replicas = kwargs.get('available_replicas', None)
        self.error = kwargs.get('error', None)


# Generated msrest wire model: AML compute cluster configuration
# (name, VM size/priority, max node count, OS, VM image).
class AMLComputeConfiguration(msrest.serialization.Model):
    """AMLComputeConfiguration.

    :ivar name:
    :vartype name: str
    :ivar vm_size:
    :vartype vm_size: str
    :ivar vm_priority: Possible values include: "Dedicated", "Lowpriority".
    :vartype vm_priority: str or ~flow.models.VmPriority
    :ivar retain_cluster:
    :vartype retain_cluster: bool
    :ivar cluster_max_node_count:
    :vartype cluster_max_node_count: int
    :ivar os_type:
    :vartype os_type: str
    :ivar virtual_machine_image:
    :vartype virtual_machine_image: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'vm_priority': {'key': 'vmPriority', 'type': 'str'},
        'retain_cluster': {'key': 'retainCluster', 'type': 'bool'},
        'cluster_max_node_count': {'key': 'clusterMaxNodeCount', 'type': 'int'},
        'os_type': {'key': 'osType', 'type': 'str'},
        'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword vm_priority: Possible values include: "Dedicated", "Lowpriority".
        :paramtype vm_priority: str or ~flow.models.VmPriority
        :keyword retain_cluster:
        :paramtype retain_cluster: bool
        :keyword cluster_max_node_count:
        :paramtype cluster_max_node_count: int
        :keyword os_type:
        :paramtype os_type: str
        :keyword virtual_machine_image:
        :paramtype virtual_machine_image: str
        """
        super(AMLComputeConfiguration, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.vm_size = kwargs.get('vm_size', None)
        self.vm_priority = kwargs.get('vm_priority', None)
        self.retain_cluster = kwargs.get('retain_cluster', None)
        self.cluster_max_node_count = kwargs.get('cluster_max_node_count', None)
        self.os_type = kwargs.get('os_type', None)
        self.virtual_machine_image = kwargs.get('virtual_machine_image', None)


# Generated msrest wire model: AML dataset reference (registered or saved
# dataset plus optional extra transformations).
class AmlDataset(msrest.serialization.Model):
    """AmlDataset.

    :ivar registered_data_set_reference:
    :vartype registered_data_set_reference: ~flow.models.RegisteredDataSetReference
    :ivar saved_data_set_reference:
    :vartype saved_data_set_reference: ~flow.models.SavedDataSetReference
    :ivar additional_transformations:
    :vartype additional_transformations: str
    """

    _attribute_map = {
        'registered_data_set_reference': {'key': 'registeredDataSetReference', 'type': 'RegisteredDataSetReference'},
        'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'SavedDataSetReference'},
        'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword registered_data_set_reference:
        :paramtype registered_data_set_reference: ~flow.models.RegisteredDataSetReference
        :keyword saved_data_set_reference:
        :paramtype saved_data_set_reference: ~flow.models.SavedDataSetReference
        :keyword additional_transformations:
        :paramtype additional_transformations: str
        """
        super(AmlDataset, self).__init__(**kwargs)
        self.registered_data_set_reference = kwargs.get('registered_data_set_reference', None)
        self.saved_data_set_reference = kwargs.get('saved_data_set_reference', None)
        self.additional_transformations = kwargs.get('additional_transformations', None)


# Generated msrest wire model: AML-on-Kubernetes configuration bundle
# (resource + priority + interactive sub-configurations).
class AmlK8SConfiguration(msrest.serialization.Model):
    """AmlK8SConfiguration.

    :ivar resource_configuration:
    :vartype resource_configuration: ~flow.models.ResourceConfiguration
    :ivar priority_configuration:
    :vartype priority_configuration: ~flow.models.AmlK8SPriorityConfiguration
    :ivar interactive_configuration:
    :vartype interactive_configuration: ~flow.models.InteractiveConfiguration
    """

    _attribute_map = {
        'resource_configuration': {'key': 'resourceConfiguration', 'type': 'ResourceConfiguration'},
        'priority_configuration': {'key': 'priorityConfiguration', 'type': 'AmlK8SPriorityConfiguration'},
        'interactive_configuration': {'key': 'interactiveConfiguration', 'type': 'InteractiveConfiguration'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword resource_configuration:
        :paramtype resource_configuration: ~flow.models.ResourceConfiguration
        :keyword priority_configuration:
        :paramtype priority_configuration: ~flow.models.AmlK8SPriorityConfiguration
        :keyword interactive_configuration:
        :paramtype interactive_configuration: ~flow.models.InteractiveConfiguration
        """
        super(AmlK8SConfiguration, self).__init__(**kwargs)
        self.resource_configuration = kwargs.get('resource_configuration', None)
        self.priority_configuration = kwargs.get('priority_configuration', None)
        self.interactive_configuration = kwargs.get('interactive_configuration', None)


# Generated msrest wire model: K8s job priority/preemption/scaling knobs.
class AmlK8SPriorityConfiguration(msrest.serialization.Model):
    """AmlK8SPriorityConfiguration.

    :ivar job_priority:
    :vartype job_priority: int
    :ivar is_preemptible:
    :vartype is_preemptible: bool
    :ivar node_count_set:
    :vartype node_count_set: list[int]
    :ivar scale_interval:
    :vartype scale_interval: int
    """

    _attribute_map = {
        'job_priority': {'key': 'jobPriority', 'type': 'int'},
        'is_preemptible': {'key': 'isPreemptible', 'type': 'bool'},
        'node_count_set': {'key': 'nodeCountSet', 'type': '[int]'},
        'scale_interval': {'key': 'scaleInterval', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword job_priority:
        :paramtype job_priority: int
        :keyword is_preemptible:
        :paramtype is_preemptible: bool
        :keyword node_count_set:
        :paramtype node_count_set: list[int]
        :keyword scale_interval:
        :paramtype scale_interval: int
        """
        super(AmlK8SPriorityConfiguration, self).__init__(**kwargs)
        self.job_priority = kwargs.get('job_priority', None)
        self.is_preemptible = kwargs.get('is_preemptible', None)
        self.node_count_set = kwargs.get('node_count_set', None)
        self.scale_interval = kwargs.get('scale_interval', None)


# Generated msrest wire model: AML Spark cloud job settings — entry point,
# dependency files, driver/executor sizing, environment, Spark conf, compute,
# resources and identity.
class AmlSparkCloudSetting(msrest.serialization.Model):
    """AmlSparkCloudSetting.

    :ivar entry:
    :vartype entry: ~flow.models.EntrySetting
    :ivar files:
    :vartype files: list[str]
    :ivar archives:
    :vartype archives: list[str]
    :ivar jars:
    :vartype jars: list[str]
    :ivar py_files:
    :vartype py_files: list[str]
    :ivar driver_memory:
    :vartype driver_memory: str
    :ivar driver_cores:
    :vartype driver_cores: int
    :ivar executor_memory:
    :vartype executor_memory: str
    :ivar executor_cores:
    :vartype executor_cores: int
    :ivar number_executors:
    :vartype number_executors: int
    :ivar environment_asset_id:
    :vartype environment_asset_id: str
    :ivar environment_variables: Dictionary of :code:`<string>`.
    :vartype environment_variables: dict[str, str]
    :ivar inline_environment_definition_string:
    :vartype inline_environment_definition_string: str
    :ivar conf: Dictionary of :code:`<string>`.
    :vartype conf: dict[str, str]
    :ivar compute:
    :vartype compute: str
    :ivar resources:
    :vartype resources: ~flow.models.ResourcesSetting
    :ivar identity:
    :vartype identity: ~flow.models.IdentitySetting
    """

    _attribute_map = {
        'entry': {'key': 'entry', 'type': 'EntrySetting'},
        'files': {'key': 'files', 'type': '[str]'},
        'archives': {'key': 'archives', 'type': '[str]'},
        'jars': {'key': 'jars', 'type': '[str]'},
        'py_files': {'key': 'pyFiles', 'type': '[str]'},
        'driver_memory': {'key': 'driverMemory', 'type': 'str'},
        'driver_cores': {'key': 'driverCores', 'type': 'int'},
        'executor_memory': {'key': 'executorMemory', 'type': 'str'},
        'executor_cores': {'key': 'executorCores', 'type': 'int'},
        'number_executors': {'key': 'numberExecutors', 'type': 'int'},
        'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'inline_environment_definition_string': {'key': 'inlineEnvironmentDefinitionString', 'type': 'str'},
        'conf': {'key': 'conf', 'type': '{str}'},
        'compute': {'key': 'compute', 'type': 'str'},
        'resources': {'key': 'resources', 'type': 'ResourcesSetting'},
        'identity': {'key': 'identity', 'type': 'IdentitySetting'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword entry:
        :paramtype entry: ~flow.models.EntrySetting
        :keyword files:
        :paramtype files: list[str]
        :keyword archives:
        :paramtype archives: list[str]
        :keyword jars:
        :paramtype jars: list[str]
        :keyword py_files:
        :paramtype py_files: list[str]
        :keyword driver_memory:
        :paramtype driver_memory: str
        :keyword driver_cores:
        :paramtype driver_cores: int
        :keyword executor_memory:
        :paramtype executor_memory: str
        :keyword executor_cores:
        :paramtype executor_cores: int
        :keyword number_executors:
        :paramtype number_executors: int
        :keyword environment_asset_id:
        :paramtype environment_asset_id: str
        :keyword environment_variables: Dictionary of :code:`<string>`.
        :paramtype environment_variables: dict[str, str]
        :keyword inline_environment_definition_string:
        :paramtype inline_environment_definition_string: str
        :keyword conf: Dictionary of :code:`<string>`.
        :paramtype conf: dict[str, str]
        :keyword compute:
        :paramtype compute: str
        :keyword resources:
        :paramtype resources: ~flow.models.ResourcesSetting
        :keyword identity:
        :paramtype identity: ~flow.models.IdentitySetting
        """
        super(AmlSparkCloudSetting, self).__init__(**kwargs)
        self.entry = kwargs.get('entry', None)
        self.files = kwargs.get('files', None)
        self.archives = kwargs.get('archives', None)
        self.jars = kwargs.get('jars', None)
        self.py_files = kwargs.get('py_files', None)
        self.driver_memory = kwargs.get('driver_memory', None)
        self.driver_cores = kwargs.get('driver_cores', None)
        self.executor_memory = kwargs.get('executor_memory', None)
        self.executor_cores = kwargs.get('executor_cores', None)
        self.number_executors = kwargs.get('number_executors', None)
        self.environment_asset_id = kwargs.get('environment_asset_id', None)
        self.environment_variables = kwargs.get('environment_variables', None)
        self.inline_environment_definition_string = kwargs.get('inline_environment_definition_string', None)
        self.conf = kwargs.get('conf', None)
        self.compute = kwargs.get('compute', None)
        self.resources = kwargs.get('resources', None)
        self.identity = kwargs.get('identity', None)


# Generated msrest wire model: AP cloud configuration referencing an AP
# module GUID. Note the non-camelCase wire key 'referencedAPModuleGuid'.
class APCloudConfiguration(msrest.serialization.Model):
    """APCloudConfiguration.

    :ivar referenced_ap_module_guid:
    :vartype referenced_ap_module_guid: str
    :ivar user_alias:
    :vartype user_alias: str
    :ivar aether_module_type:
    :vartype aether_module_type: str
    """

    _attribute_map = {
        'referenced_ap_module_guid': {'key': 'referencedAPModuleGuid', 'type': 'str'},
        'user_alias': {'key': 'userAlias', 'type': 'str'},
        'aether_module_type': {'key': 'aetherModuleType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword referenced_ap_module_guid:
        :paramtype referenced_ap_module_guid: str
        :keyword user_alias:
        :paramtype user_alias: str
        :keyword aether_module_type:
        :paramtype aether_module_type: str
        """
        super(APCloudConfiguration, self).__init__(**kwargs)
        self.referenced_ap_module_guid = kwargs.get('referenced_ap_module_guid', None)
        self.user_alias = kwargs.get('user_alias', None)
        self.aether_module_type = kwargs.get('aether_module_type', None)


# Generated msrest wire model: tool API name plus its setting parameters.
# Note the snake_case wire key 'default_prompt' (not camelCase).
class ApiAndParameters(msrest.serialization.Model):
    """ApiAndParameters.

    :ivar api:
    :vartype api: str
    :ivar parameters: This is a dictionary.
    :vartype parameters: dict[str, ~flow.models.FlowToolSettingParameter]
    :ivar default_prompt:
    :vartype default_prompt: str
    """

    _attribute_map = {
        'api': {'key': 'api', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{FlowToolSettingParameter}'},
        'default_prompt': {'key': 'default_prompt', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword api:
        :paramtype api: str
        :keyword parameters: This is a dictionary.
        :paramtype parameters: dict[str, ~flow.models.FlowToolSettingParameter]
        :keyword default_prompt:
        :paramtype default_prompt: str
        """
        super(ApiAndParameters, self).__init__(**kwargs)
        self.api = kwargs.get('api', None)
        self.parameters = kwargs.get('parameters', None)
        self.default_prompt = kwargs.get('default_prompt', None)


# Generated msrest wire model: application endpoint (type/port/properties/nodes).
class ApplicationEndpointConfiguration(msrest.serialization.Model):
    """ApplicationEndpointConfiguration.

    :ivar type: Possible values include: "Jupyter", "JupyterLab", "SSH", "TensorBoard", "VSCode",
     "Theia", "Grafana", "Custom", "RayDashboard".
    :vartype type: str or ~flow.models.ApplicationEndpointType
    :ivar port:
    :vartype port: int
    :ivar properties: Dictionary of :code:`<string>`.
    :vartype properties: dict[str, str]
    :ivar nodes:
    :vartype nodes: ~flow.models.Nodes
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'port': {'key': 'port', 'type': 'int'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'nodes': {'key': 'nodes', 'type': 'Nodes'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword type: Possible values include: "Jupyter", "JupyterLab", "SSH", "TensorBoard",
         "VSCode", "Theia", "Grafana", "Custom", "RayDashboard".
        :paramtype type: str or ~flow.models.ApplicationEndpointType
        :keyword port:
        :paramtype port: int
        :keyword properties: Dictionary of :code:`<string>`.
        :paramtype properties: dict[str, str]
        :keyword nodes:
        :paramtype nodes: ~flow.models.Nodes
        """
        super(ApplicationEndpointConfiguration, self).__init__(**kwargs)
        self.type = kwargs.get('type', None)
        self.port = kwargs.get('port', None)
        self.properties = kwargs.get('properties', None)
        self.nodes = kwargs.get('nodes', None)


# Generated msrest wire model: argument assignment; self-referential — nested
# and string-interpolation lists contain further ArgumentAssignment items.
class ArgumentAssignment(msrest.serialization.Model):
    """ArgumentAssignment.

    :ivar value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
     "NestedList", "StringInterpolationList".
    :vartype value_type: str or ~flow.models.ArgumentValueType
    :ivar value:
    :vartype value: str
    :ivar nested_argument_list:
    :vartype nested_argument_list: list[~flow.models.ArgumentAssignment]
    :ivar string_interpolation_argument_list:
    :vartype string_interpolation_argument_list: list[~flow.models.ArgumentAssignment]
    """

    _attribute_map = {
        'value_type': {'key': 'valueType', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
        'nested_argument_list': {'key': 'nestedArgumentList', 'type': '[ArgumentAssignment]'},
        'string_interpolation_argument_list': {'key': 'stringInterpolationArgumentList', 'type': '[ArgumentAssignment]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
         "NestedList", "StringInterpolationList".
        :paramtype value_type: str or ~flow.models.ArgumentValueType
        :keyword value:
        :paramtype value: str
        :keyword nested_argument_list:
        :paramtype nested_argument_list: list[~flow.models.ArgumentAssignment]
        :keyword string_interpolation_argument_list:
        :paramtype string_interpolation_argument_list: list[~flow.models.ArgumentAssignment]
        """
        super(ArgumentAssignment, self).__init__(**kwargs)
        self.value_type = kwargs.get('value_type', None)
        self.value = kwargs.get('value', None)
        self.nested_argument_list = kwargs.get('nested_argument_list', None)
        self.string_interpolation_argument_list = kwargs.get('string_interpolation_argument_list', None)


# Generated msrest wire model: minimal asset reference (id + type).
class Asset(msrest.serialization.Model):
    """Asset.

    :ivar asset_id:
    :vartype asset_id: str
    :ivar type:
    :vartype type: str
    """

    _attribute_map = {
        'asset_id': {'key': 'assetId', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword asset_id:
        :paramtype asset_id: str
        :keyword type:
        :paramtype type: str
        """
        super(Asset, self).__init__(**kwargs)
        self.asset_id = kwargs.get('asset_id', None)
        self.type = kwargs.get('type', None)


# Generated msrest wire model: asset definition (path + typed asset id).
class AssetDefinition(msrest.serialization.Model):
    """AssetDefinition.

    :ivar path:
    :vartype path: str
    :ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
     "MLFlowModel", "TritonModel", "OpenAIModel".
    :vartype type: str or ~flow.models.AEVAAssetType
    :ivar asset_id:
    :vartype asset_id: str
    :ivar serialized_asset_id:
    :vartype serialized_asset_id: str
    """

    _attribute_map = {
        'path': {'key': 'path', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'asset_id': {'key': 'assetId', 'type': 'str'},
        'serialized_asset_id': {'key': 'serializedAssetId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword path:
        :paramtype path: str
        :keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
         "MLFlowModel", "TritonModel", "OpenAIModel".
        :paramtype type: str or ~flow.models.AEVAAssetType
        :keyword asset_id:
        :paramtype asset_id: str
        :keyword serialized_asset_id:
        :paramtype serialized_asset_id: str
        """
        super(AssetDefinition, self).__init__(**kwargs)
        self.path = kwargs.get('path', None)
        self.type = kwargs.get('type', None)
        self.asset_id = kwargs.get('asset_id', None)
        self.serialized_asset_id = kwargs.get('serialized_asset_id', None)


# Generated msrest wire model: asset addressed by name + version (+ feed).
class AssetNameAndVersionIdentifier(msrest.serialization.Model):
    """AssetNameAndVersionIdentifier.

    :ivar asset_name:
    :vartype asset_name: str
    :ivar version:
    :vartype version: str
    :ivar feed_name:
    :vartype feed_name: str
    """

    _attribute_map = {
        'asset_name': {'key': 'assetName', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'feed_name': {'key': 'feedName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword asset_name:
        :paramtype asset_name: str
        :keyword version:
        :paramtype version: str
        :keyword feed_name:
        :paramtype feed_name: str
        """
        super(AssetNameAndVersionIdentifier, self).__init__(**kwargs)
        self.asset_name = kwargs.get('asset_name', None)
        self.version = kwargs.get('version', None)
        self.feed_name = kwargs.get('feed_name', None)


# Generated msrest wire model: output asset settings. Note the PascalCase
# wire key 'PathParameterAssignment' — unlike the other camelCase keys here.
class AssetOutputSettings(msrest.serialization.Model):
    """AssetOutputSettings.

    :ivar path:
    :vartype path: str
    :ivar path_parameter_assignment:
    :vartype path_parameter_assignment: ~flow.models.ParameterAssignment
    :ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
     "MLFlowModel", "TritonModel", "OpenAIModel".
    :vartype type: str or ~flow.models.AEVAAssetType
    :ivar options: This is a dictionary.
    :vartype options: dict[str, str]
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    """

    _attribute_map = {
        'path': {'key': 'path', 'type': 'str'},
        'path_parameter_assignment': {'key': 'PathParameterAssignment', 'type': 'ParameterAssignment'},
        'type': {'key': 'type', 'type': 'str'},
        'options': {'key': 'options', 'type': '{str}'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword path:
        :paramtype path: str
        :keyword path_parameter_assignment:
        :paramtype path_parameter_assignment: ~flow.models.ParameterAssignment
        :keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
         "MLFlowModel", "TritonModel", "OpenAIModel".
        :paramtype type: str or ~flow.models.AEVAAssetType
        :keyword options: This is a dictionary.
        :paramtype options: dict[str, str]
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        """
        super(AssetOutputSettings, self).__init__(**kwargs)
        self.path = kwargs.get('path', None)
        self.path_parameter_assignment = kwargs.get('path_parameter_assignment', None)
        self.type = kwargs.get('type', None)
        self.options = kwargs.get('options', None)
        self.data_store_mode = kwargs.get('data_store_mode', None)
        self.name = kwargs.get('name', None)
        self.version = kwargs.get('version', None)


# Generated msrest wire model: a named, documented AssetOutputSettings default.
class AssetOutputSettingsParameter(msrest.serialization.Model):
    """AssetOutputSettingsParameter.

    :ivar name:
    :vartype name: str
    :ivar documentation:
    :vartype documentation: str
    :ivar default_value:
    :vartype default_value: ~flow.models.AssetOutputSettings
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'documentation': {'key': 'documentation', 'type': 'str'},
        'default_value': {'key': 'defaultValue', 'type': 'AssetOutputSettings'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword documentation:
        :paramtype documentation: str
        :keyword default_value:
        :paramtype default_value: ~flow.models.AssetOutputSettings
        """
        super(AssetOutputSettingsParameter, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.documentation = kwargs.get('documentation', None)
        self.default_value = kwargs.get('default_value', None)


# Generated msrest wire model: result of publishing an asset, including
# per-region results; timestamps are (de)serialized as ISO-8601.
class AssetPublishResult(msrest.serialization.Model):
    """AssetPublishResult.

    :ivar feed_name:
    :vartype feed_name: str
    :ivar asset_name:
    :vartype asset_name: str
    :ivar asset_version:
    :vartype asset_version: str
    :ivar step_name:
    :vartype step_name: str
    :ivar status:
    :vartype status: str
    :ivar error_message:
    :vartype error_message: str
    :ivar created_time:
    :vartype created_time: ~datetime.datetime
    :ivar last_updated_time:
    :vartype last_updated_time: ~datetime.datetime
    :ivar regional_publish_results: Dictionary of :code:`<AssetPublishSingleRegionResult>`.
    :vartype regional_publish_results: dict[str, ~flow.models.AssetPublishSingleRegionResult]
    """

    _attribute_map = {
        'feed_name': {'key': 'feedName', 'type': 'str'},
        'asset_name': {'key': 'assetName', 'type': 'str'},
        'asset_version': {'key': 'assetVersion', 'type': 'str'},
        'step_name': {'key': 'stepName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'error_message': {'key': 'errorMessage', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'last_updated_time': {'key': 'lastUpdatedTime', 'type': 'iso-8601'},
        'regional_publish_results': {'key': 'regionalPublishResults', 'type': '{AssetPublishSingleRegionResult}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword feed_name:
        :paramtype feed_name: str
        :keyword asset_name:
        :paramtype asset_name: str
        :keyword asset_version:
        :paramtype asset_version: str
        :keyword step_name:
        :paramtype step_name: str
        :keyword status:
        :paramtype status: str
        :keyword error_message:
        :paramtype error_message: str
        :keyword created_time:
        :paramtype created_time: ~datetime.datetime
        :keyword last_updated_time:
        :paramtype last_updated_time: ~datetime.datetime
        :keyword regional_publish_results: Dictionary of :code:`<AssetPublishSingleRegionResult>`.
        :paramtype regional_publish_results: dict[str, ~flow.models.AssetPublishSingleRegionResult]
        """
        super(AssetPublishResult, self).__init__(**kwargs)
        self.feed_name = kwargs.get('feed_name', None)
        self.asset_name = kwargs.get('asset_name', None)
        self.asset_version = kwargs.get('asset_version', None)
        self.step_name = kwargs.get('step_name', None)
        self.status = kwargs.get('status', None)
        self.error_message = kwargs.get('error_message', None)
        self.created_time = kwargs.get('created_time', None)
        self.last_updated_time = kwargs.get('last_updated_time', None)
        self.regional_publish_results = kwargs.get('regional_publish_results', None)


class AssetPublishSingleRegionResult(msrest.serialization.Model):
    """AssetPublishSingleRegionResult.
:ivar step_name: :vartype step_name: str :ivar status: :vartype status: str :ivar error_message: :vartype error_message: str :ivar last_updated_time: :vartype last_updated_time: ~datetime.datetime :ivar total_steps: :vartype total_steps: int :ivar finished_steps: :vartype finished_steps: int :ivar remaining_steps: :vartype remaining_steps: int """ _attribute_map = { 'step_name': {'key': 'stepName', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'error_message': {'key': 'errorMessage', 'type': 'str'}, 'last_updated_time': {'key': 'lastUpdatedTime', 'type': 'iso-8601'}, 'total_steps': {'key': 'totalSteps', 'type': 'int'}, 'finished_steps': {'key': 'finishedSteps', 'type': 'int'}, 'remaining_steps': {'key': 'remainingSteps', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword step_name: :paramtype step_name: str :keyword status: :paramtype status: str :keyword error_message: :paramtype error_message: str :keyword last_updated_time: :paramtype last_updated_time: ~datetime.datetime :keyword total_steps: :paramtype total_steps: int :keyword finished_steps: :paramtype finished_steps: int :keyword remaining_steps: :paramtype remaining_steps: int """ super(AssetPublishSingleRegionResult, self).__init__(**kwargs) self.step_name = kwargs.get('step_name', None) self.status = kwargs.get('status', None) self.error_message = kwargs.get('error_message', None) self.last_updated_time = kwargs.get('last_updated_time', None) self.total_steps = kwargs.get('total_steps', None) self.finished_steps = kwargs.get('finished_steps', None) self.remaining_steps = kwargs.get('remaining_steps', None) class AssetTypeMetaInfo(msrest.serialization.Model): """AssetTypeMetaInfo. :ivar consumption_mode: Possible values include: "Reference", "Copy", "CopyAndAutoUpgrade". 
:vartype consumption_mode: str or ~flow.models.ConsumeMode """ _attribute_map = { 'consumption_mode': {'key': 'consumptionMode', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword consumption_mode: Possible values include: "Reference", "Copy", "CopyAndAutoUpgrade". :paramtype consumption_mode: str or ~flow.models.ConsumeMode """ super(AssetTypeMetaInfo, self).__init__(**kwargs) self.consumption_mode = kwargs.get('consumption_mode', None) class AssetVersionPublishRequest(msrest.serialization.Model): """AssetVersionPublishRequest. :ivar asset_type: Possible values include: "Component", "Model", "Environment", "Dataset", "DataStore", "SampleGraph", "FlowTool", "FlowToolSetting", "FlowConnection", "FlowSample", "FlowRuntimeSpec". :vartype asset_type: str or ~flow.models.AssetType :ivar asset_source_type: Possible values include: "Unknown", "Local", "GithubFile", "GithubFolder", "DevopsArtifactsZip". :vartype asset_source_type: str or ~flow.models.AssetSourceType :ivar yaml_file: :vartype yaml_file: str :ivar source_zip_url: :vartype source_zip_url: str :ivar source_zip_file: :vartype source_zip_file: IO :ivar feed_name: :vartype feed_name: str :ivar set_as_default_version: :vartype set_as_default_version: bool :ivar referenced_assets: :vartype referenced_assets: list[~flow.models.AssetNameAndVersionIdentifier] :ivar flow_file: :vartype flow_file: str :ivar version: :vartype version: str """ _attribute_map = { 'asset_type': {'key': 'assetType', 'type': 'str'}, 'asset_source_type': {'key': 'assetSourceType', 'type': 'str'}, 'yaml_file': {'key': 'yamlFile', 'type': 'str'}, 'source_zip_url': {'key': 'sourceZipUrl', 'type': 'str'}, 'source_zip_file': {'key': 'sourceZipFile', 'type': 'IO'}, 'feed_name': {'key': 'feedName', 'type': 'str'}, 'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'}, 'referenced_assets': {'key': 'referencedAssets', 'type': '[AssetNameAndVersionIdentifier]'}, 'flow_file': {'key': 'flowFile', 'type': 'str'}, 'version': 
{'key': 'version', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword asset_type: Possible values include: "Component", "Model", "Environment", "Dataset", "DataStore", "SampleGraph", "FlowTool", "FlowToolSetting", "FlowConnection", "FlowSample", "FlowRuntimeSpec". :paramtype asset_type: str or ~flow.models.AssetType :keyword asset_source_type: Possible values include: "Unknown", "Local", "GithubFile", "GithubFolder", "DevopsArtifactsZip". :paramtype asset_source_type: str or ~flow.models.AssetSourceType :keyword yaml_file: :paramtype yaml_file: str :keyword source_zip_url: :paramtype source_zip_url: str :keyword source_zip_file: :paramtype source_zip_file: IO :keyword feed_name: :paramtype feed_name: str :keyword set_as_default_version: :paramtype set_as_default_version: bool :keyword referenced_assets: :paramtype referenced_assets: list[~flow.models.AssetNameAndVersionIdentifier] :keyword flow_file: :paramtype flow_file: str :keyword version: :paramtype version: str """ super(AssetVersionPublishRequest, self).__init__(**kwargs) self.asset_type = kwargs.get('asset_type', None) self.asset_source_type = kwargs.get('asset_source_type', None) self.yaml_file = kwargs.get('yaml_file', None) self.source_zip_url = kwargs.get('source_zip_url', None) self.source_zip_file = kwargs.get('source_zip_file', None) self.feed_name = kwargs.get('feed_name', None) self.set_as_default_version = kwargs.get('set_as_default_version', None) self.referenced_assets = kwargs.get('referenced_assets', None) self.flow_file = kwargs.get('flow_file', None) self.version = kwargs.get('version', None) class AssignedUser(msrest.serialization.Model): """AssignedUser. 
:ivar object_id: :vartype object_id: str :ivar tenant_id: :vartype tenant_id: str """ _attribute_map = { 'object_id': {'key': 'objectId', 'type': 'str'}, 'tenant_id': {'key': 'tenantId', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword object_id: :paramtype object_id: str :keyword tenant_id: :paramtype tenant_id: str """ super(AssignedUser, self).__init__(**kwargs) self.object_id = kwargs.get('object_id', None) self.tenant_id = kwargs.get('tenant_id', None) class AuthKeys(msrest.serialization.Model): """AuthKeys. :ivar primary_key: :vartype primary_key: str :ivar secondary_key: :vartype secondary_key: str """ _attribute_map = { 'primary_key': {'key': 'primaryKey', 'type': 'str'}, 'secondary_key': {'key': 'secondaryKey', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword primary_key: :paramtype primary_key: str :keyword secondary_key: :paramtype secondary_key: str """ super(AuthKeys, self).__init__(**kwargs) self.primary_key = kwargs.get('primary_key', None) self.secondary_key = kwargs.get('secondary_key', None) class AutoClusterComputeSpecification(msrest.serialization.Model): """AutoClusterComputeSpecification. 
:ivar instance_size: :vartype instance_size: str :ivar instance_priority: :vartype instance_priority: str :ivar os_type: :vartype os_type: str :ivar location: :vartype location: str :ivar runtime_version: :vartype runtime_version: str :ivar quota_enforcement_resource_id: :vartype quota_enforcement_resource_id: str :ivar model_compute_specification_id: :vartype model_compute_specification_id: str """ _attribute_map = { 'instance_size': {'key': 'instanceSize', 'type': 'str'}, 'instance_priority': {'key': 'instancePriority', 'type': 'str'}, 'os_type': {'key': 'osType', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'runtime_version': {'key': 'runtimeVersion', 'type': 'str'}, 'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'}, 'model_compute_specification_id': {'key': 'modelComputeSpecificationId', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword instance_size: :paramtype instance_size: str :keyword instance_priority: :paramtype instance_priority: str :keyword os_type: :paramtype os_type: str :keyword location: :paramtype location: str :keyword runtime_version: :paramtype runtime_version: str :keyword quota_enforcement_resource_id: :paramtype quota_enforcement_resource_id: str :keyword model_compute_specification_id: :paramtype model_compute_specification_id: str """ super(AutoClusterComputeSpecification, self).__init__(**kwargs) self.instance_size = kwargs.get('instance_size', None) self.instance_priority = kwargs.get('instance_priority', None) self.os_type = kwargs.get('os_type', None) self.location = kwargs.get('location', None) self.runtime_version = kwargs.get('runtime_version', None) self.quota_enforcement_resource_id = kwargs.get('quota_enforcement_resource_id', None) self.model_compute_specification_id = kwargs.get('model_compute_specification_id', None) class AutoDeleteSetting(msrest.serialization.Model): """AutoDeleteSetting. 
:ivar condition: Possible values include: "CreatedGreaterThan", "LastAccessedGreaterThan". :vartype condition: str or ~flow.models.AutoDeleteCondition :ivar value: :vartype value: str """ _attribute_map = { 'condition': {'key': 'condition', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword condition: Possible values include: "CreatedGreaterThan", "LastAccessedGreaterThan". :paramtype condition: str or ~flow.models.AutoDeleteCondition :keyword value: :paramtype value: str """ super(AutoDeleteSetting, self).__init__(**kwargs) self.condition = kwargs.get('condition', None) self.value = kwargs.get('value', None) class AutoFeaturizeConfiguration(msrest.serialization.Model): """AutoFeaturizeConfiguration. :ivar featurization_config: :vartype featurization_config: ~flow.models.FeaturizationSettings """ _attribute_map = { 'featurization_config': {'key': 'featurizationConfig', 'type': 'FeaturizationSettings'}, } def __init__( self, **kwargs ): """ :keyword featurization_config: :paramtype featurization_config: ~flow.models.FeaturizationSettings """ super(AutoFeaturizeConfiguration, self).__init__(**kwargs) self.featurization_config = kwargs.get('featurization_config', None) class AutologgerSettings(msrest.serialization.Model): """AutologgerSettings. :ivar ml_flow_autologger: Possible values include: "Enabled", "Disabled". :vartype ml_flow_autologger: str or ~flow.models.MLFlowAutologgerState """ _attribute_map = { 'ml_flow_autologger': {'key': 'mlFlowAutologger', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword ml_flow_autologger: Possible values include: "Enabled", "Disabled". :paramtype ml_flow_autologger: str or ~flow.models.MLFlowAutologgerState """ super(AutologgerSettings, self).__init__(**kwargs) self.ml_flow_autologger = kwargs.get('ml_flow_autologger', None) class AutoMLComponentConfiguration(msrest.serialization.Model): """AutoMLComponentConfiguration. 
:ivar auto_train_config: :vartype auto_train_config: ~flow.models.AutoTrainConfiguration :ivar auto_featurize_config: :vartype auto_featurize_config: ~flow.models.AutoFeaturizeConfiguration """ _attribute_map = { 'auto_train_config': {'key': 'autoTrainConfig', 'type': 'AutoTrainConfiguration'}, 'auto_featurize_config': {'key': 'autoFeaturizeConfig', 'type': 'AutoFeaturizeConfiguration'}, } def __init__( self, **kwargs ): """ :keyword auto_train_config: :paramtype auto_train_config: ~flow.models.AutoTrainConfiguration :keyword auto_featurize_config: :paramtype auto_featurize_config: ~flow.models.AutoFeaturizeConfiguration """ super(AutoMLComponentConfiguration, self).__init__(**kwargs) self.auto_train_config = kwargs.get('auto_train_config', None) self.auto_featurize_config = kwargs.get('auto_featurize_config', None) class AutoScaler(msrest.serialization.Model): """AutoScaler. :ivar autoscale_enabled: :vartype autoscale_enabled: bool :ivar min_replicas: :vartype min_replicas: int :ivar max_replicas: :vartype max_replicas: int :ivar target_utilization: :vartype target_utilization: int :ivar refresh_period_in_seconds: :vartype refresh_period_in_seconds: int """ _attribute_map = { 'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'}, 'min_replicas': {'key': 'minReplicas', 'type': 'int'}, 'max_replicas': {'key': 'maxReplicas', 'type': 'int'}, 'target_utilization': {'key': 'targetUtilization', 'type': 'int'}, 'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword autoscale_enabled: :paramtype autoscale_enabled: bool :keyword min_replicas: :paramtype min_replicas: int :keyword max_replicas: :paramtype max_replicas: int :keyword target_utilization: :paramtype target_utilization: int :keyword refresh_period_in_seconds: :paramtype refresh_period_in_seconds: int """ super(AutoScaler, self).__init__(**kwargs) self.autoscale_enabled = kwargs.get('autoscale_enabled', None) self.min_replicas = 
kwargs.get('min_replicas', None) self.max_replicas = kwargs.get('max_replicas', None) self.target_utilization = kwargs.get('target_utilization', None) self.refresh_period_in_seconds = kwargs.get('refresh_period_in_seconds', None) class AutoTrainConfiguration(msrest.serialization.Model): """AutoTrainConfiguration. :ivar general_settings: :vartype general_settings: ~flow.models.GeneralSettings :ivar limit_settings: :vartype limit_settings: ~flow.models.LimitSettings :ivar data_settings: :vartype data_settings: ~flow.models.DataSettings :ivar forecasting_settings: :vartype forecasting_settings: ~flow.models.ForecastingSettings :ivar training_settings: :vartype training_settings: ~flow.models.TrainingSettings :ivar sweep_settings: :vartype sweep_settings: ~flow.models.SweepSettings :ivar image_model_settings: Dictionary of :code:`<any>`. :vartype image_model_settings: dict[str, any] :ivar properties: Dictionary of :code:`<string>`. :vartype properties: dict[str, str] :ivar compute_configuration: :vartype compute_configuration: ~flow.models.AEVAComputeConfiguration :ivar resource_configurtion: :vartype resource_configurtion: ~flow.models.AEVAResourceConfiguration :ivar environment_id: :vartype environment_id: str :ivar environment_variables: Dictionary of :code:`<string>`. 
:vartype environment_variables: dict[str, str] """ _attribute_map = { 'general_settings': {'key': 'generalSettings', 'type': 'GeneralSettings'}, 'limit_settings': {'key': 'limitSettings', 'type': 'LimitSettings'}, 'data_settings': {'key': 'dataSettings', 'type': 'DataSettings'}, 'forecasting_settings': {'key': 'forecastingSettings', 'type': 'ForecastingSettings'}, 'training_settings': {'key': 'trainingSettings', 'type': 'TrainingSettings'}, 'sweep_settings': {'key': 'sweepSettings', 'type': 'SweepSettings'}, 'image_model_settings': {'key': 'imageModelSettings', 'type': '{object}'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'compute_configuration': {'key': 'computeConfiguration', 'type': 'AEVAComputeConfiguration'}, 'resource_configurtion': {'key': 'resourceConfigurtion', 'type': 'AEVAResourceConfiguration'}, 'environment_id': {'key': 'environmentId', 'type': 'str'}, 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, } def __init__( self, **kwargs ): """ :keyword general_settings: :paramtype general_settings: ~flow.models.GeneralSettings :keyword limit_settings: :paramtype limit_settings: ~flow.models.LimitSettings :keyword data_settings: :paramtype data_settings: ~flow.models.DataSettings :keyword forecasting_settings: :paramtype forecasting_settings: ~flow.models.ForecastingSettings :keyword training_settings: :paramtype training_settings: ~flow.models.TrainingSettings :keyword sweep_settings: :paramtype sweep_settings: ~flow.models.SweepSettings :keyword image_model_settings: Dictionary of :code:`<any>`. :paramtype image_model_settings: dict[str, any] :keyword properties: Dictionary of :code:`<string>`. 
:paramtype properties: dict[str, str] :keyword compute_configuration: :paramtype compute_configuration: ~flow.models.AEVAComputeConfiguration :keyword resource_configurtion: :paramtype resource_configurtion: ~flow.models.AEVAResourceConfiguration :keyword environment_id: :paramtype environment_id: str :keyword environment_variables: Dictionary of :code:`<string>`. :paramtype environment_variables: dict[str, str] """ super(AutoTrainConfiguration, self).__init__(**kwargs) self.general_settings = kwargs.get('general_settings', None) self.limit_settings = kwargs.get('limit_settings', None) self.data_settings = kwargs.get('data_settings', None) self.forecasting_settings = kwargs.get('forecasting_settings', None) self.training_settings = kwargs.get('training_settings', None) self.sweep_settings = kwargs.get('sweep_settings', None) self.image_model_settings = kwargs.get('image_model_settings', None) self.properties = kwargs.get('properties', None) self.compute_configuration = kwargs.get('compute_configuration', None) self.resource_configurtion = kwargs.get('resource_configurtion', None) self.environment_id = kwargs.get('environment_id', None) self.environment_variables = kwargs.get('environment_variables', None) class AvailabilityResponse(msrest.serialization.Model): """AvailabilityResponse. :ivar is_available: :vartype is_available: bool :ivar error: The error response. :vartype error: ~flow.models.ErrorResponse """ _attribute_map = { 'is_available': {'key': 'isAvailable', 'type': 'bool'}, 'error': {'key': 'error', 'type': 'ErrorResponse'}, } def __init__( self, **kwargs ): """ :keyword is_available: :paramtype is_available: bool :keyword error: The error response. :paramtype error: ~flow.models.ErrorResponse """ super(AvailabilityResponse, self).__init__(**kwargs) self.is_available = kwargs.get('is_available', None) self.error = kwargs.get('error', None) class AzureBlobReference(msrest.serialization.Model): """AzureBlobReference. 
:ivar container: :vartype container: str :ivar sas_token: :vartype sas_token: str :ivar uri: :vartype uri: str :ivar account: :vartype account: str :ivar relative_path: :vartype relative_path: str :ivar aml_data_store_name: :vartype aml_data_store_name: str """ _attribute_map = { 'container': {'key': 'container', 'type': 'str'}, 'sas_token': {'key': 'sasToken', 'type': 'str'}, 'uri': {'key': 'uri', 'type': 'str'}, 'account': {'key': 'account', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword container: :paramtype container: str :keyword sas_token: :paramtype sas_token: str :keyword uri: :paramtype uri: str :keyword account: :paramtype account: str :keyword relative_path: :paramtype relative_path: str :keyword aml_data_store_name: :paramtype aml_data_store_name: str """ super(AzureBlobReference, self).__init__(**kwargs) self.container = kwargs.get('container', None) self.sas_token = kwargs.get('sas_token', None) self.uri = kwargs.get('uri', None) self.account = kwargs.get('account', None) self.relative_path = kwargs.get('relative_path', None) self.aml_data_store_name = kwargs.get('aml_data_store_name', None) class AzureDatabaseReference(msrest.serialization.Model): """AzureDatabaseReference. 
:ivar table_name: :vartype table_name: str :ivar sql_query: :vartype sql_query: str :ivar stored_procedure_name: :vartype stored_procedure_name: str :ivar stored_procedure_parameters: :vartype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter] :ivar aml_data_store_name: :vartype aml_data_store_name: str """ _attribute_map = { 'table_name': {'key': 'tableName', 'type': 'str'}, 'sql_query': {'key': 'sqlQuery', 'type': 'str'}, 'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[StoredProcedureParameter]'}, 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword table_name: :paramtype table_name: str :keyword sql_query: :paramtype sql_query: str :keyword stored_procedure_name: :paramtype stored_procedure_name: str :keyword stored_procedure_parameters: :paramtype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter] :keyword aml_data_store_name: :paramtype aml_data_store_name: str """ super(AzureDatabaseReference, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) self.sql_query = kwargs.get('sql_query', None) self.stored_procedure_name = kwargs.get('stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.aml_data_store_name = kwargs.get('aml_data_store_name', None) class AzureDataLakeGen2Reference(msrest.serialization.Model): """AzureDataLakeGen2Reference. 
:ivar file_system_name: :vartype file_system_name: str :ivar uri: :vartype uri: str :ivar account: :vartype account: str :ivar relative_path: :vartype relative_path: str :ivar aml_data_store_name: :vartype aml_data_store_name: str """ _attribute_map = { 'file_system_name': {'key': 'fileSystemName', 'type': 'str'}, 'uri': {'key': 'uri', 'type': 'str'}, 'account': {'key': 'account', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword file_system_name: :paramtype file_system_name: str :keyword uri: :paramtype uri: str :keyword account: :paramtype account: str :keyword relative_path: :paramtype relative_path: str :keyword aml_data_store_name: :paramtype aml_data_store_name: str """ super(AzureDataLakeGen2Reference, self).__init__(**kwargs) self.file_system_name = kwargs.get('file_system_name', None) self.uri = kwargs.get('uri', None) self.account = kwargs.get('account', None) self.relative_path = kwargs.get('relative_path', None) self.aml_data_store_name = kwargs.get('aml_data_store_name', None) class AzureDataLakeReference(msrest.serialization.Model): """AzureDataLakeReference. 
:ivar tenant: :vartype tenant: str :ivar subscription: :vartype subscription: str :ivar resource_group: :vartype resource_group: str :ivar uri: :vartype uri: str :ivar account: :vartype account: str :ivar relative_path: :vartype relative_path: str :ivar aml_data_store_name: :vartype aml_data_store_name: str """ _attribute_map = { 'tenant': {'key': 'tenant', 'type': 'str'}, 'subscription': {'key': 'subscription', 'type': 'str'}, 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, 'uri': {'key': 'uri', 'type': 'str'}, 'account': {'key': 'account', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword tenant: :paramtype tenant: str :keyword subscription: :paramtype subscription: str :keyword resource_group: :paramtype resource_group: str :keyword uri: :paramtype uri: str :keyword account: :paramtype account: str :keyword relative_path: :paramtype relative_path: str :keyword aml_data_store_name: :paramtype aml_data_store_name: str """ super(AzureDataLakeReference, self).__init__(**kwargs) self.tenant = kwargs.get('tenant', None) self.subscription = kwargs.get('subscription', None) self.resource_group = kwargs.get('resource_group', None) self.uri = kwargs.get('uri', None) self.account = kwargs.get('account', None) self.relative_path = kwargs.get('relative_path', None) self.aml_data_store_name = kwargs.get('aml_data_store_name', None) class AzureFilesReference(msrest.serialization.Model): """AzureFilesReference. 
:ivar share: :vartype share: str :ivar uri: :vartype uri: str :ivar account: :vartype account: str :ivar relative_path: :vartype relative_path: str :ivar aml_data_store_name: :vartype aml_data_store_name: str """ _attribute_map = { 'share': {'key': 'share', 'type': 'str'}, 'uri': {'key': 'uri', 'type': 'str'}, 'account': {'key': 'account', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword share: :paramtype share: str :keyword uri: :paramtype uri: str :keyword account: :paramtype account: str :keyword relative_path: :paramtype relative_path: str :keyword aml_data_store_name: :paramtype aml_data_store_name: str """ super(AzureFilesReference, self).__init__(**kwargs) self.share = kwargs.get('share', None) self.uri = kwargs.get('uri', None) self.account = kwargs.get('account', None) self.relative_path = kwargs.get('relative_path', None) self.aml_data_store_name = kwargs.get('aml_data_store_name', None) class AzureMLModuleVersionDescriptor(msrest.serialization.Model): """AzureMLModuleVersionDescriptor. :ivar module_version_id: :vartype module_version_id: str :ivar version: :vartype version: str """ _attribute_map = { 'module_version_id': {'key': 'moduleVersionId', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword module_version_id: :paramtype module_version_id: str :keyword version: :paramtype version: str """ super(AzureMLModuleVersionDescriptor, self).__init__(**kwargs) self.module_version_id = kwargs.get('module_version_id', None) self.version = kwargs.get('version', None) class AzureOpenAIDeploymentDto(msrest.serialization.Model): """AzureOpenAIDeploymentDto. 
:ivar name: :vartype name: str :ivar model_name: :vartype model_name: str :ivar capabilities: :vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'model_name': {'key': 'modelName', 'type': 'str'}, 'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword model_name: :paramtype model_name: str :keyword capabilities: :paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities """ super(AzureOpenAIDeploymentDto, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.model_name = kwargs.get('model_name', None) self.capabilities = kwargs.get('capabilities', None) class AzureOpenAIModelCapabilities(msrest.serialization.Model): """AzureOpenAIModelCapabilities. :ivar completion: :vartype completion: bool :ivar chat_completion: :vartype chat_completion: bool :ivar embeddings: :vartype embeddings: bool """ _attribute_map = { 'completion': {'key': 'completion', 'type': 'bool'}, 'chat_completion': {'key': 'chat_completion', 'type': 'bool'}, 'embeddings': {'key': 'embeddings', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword completion: :paramtype completion: bool :keyword chat_completion: :paramtype chat_completion: bool :keyword embeddings: :paramtype embeddings: bool """ super(AzureOpenAIModelCapabilities, self).__init__(**kwargs) self.completion = kwargs.get('completion', None) self.chat_completion = kwargs.get('chat_completion', None) self.embeddings = kwargs.get('embeddings', None) class BatchAiComputeInfo(msrest.serialization.Model): """BatchAiComputeInfo. 
:ivar batch_ai_subscription_id: :vartype batch_ai_subscription_id: str :ivar batch_ai_resource_group: :vartype batch_ai_resource_group: str :ivar batch_ai_workspace_name: :vartype batch_ai_workspace_name: str :ivar cluster_name: :vartype cluster_name: str :ivar native_shared_directory: :vartype native_shared_directory: str """ _attribute_map = { 'batch_ai_subscription_id': {'key': 'batchAiSubscriptionId', 'type': 'str'}, 'batch_ai_resource_group': {'key': 'batchAiResourceGroup', 'type': 'str'}, 'batch_ai_workspace_name': {'key': 'batchAiWorkspaceName', 'type': 'str'}, 'cluster_name': {'key': 'clusterName', 'type': 'str'}, 'native_shared_directory': {'key': 'nativeSharedDirectory', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword batch_ai_subscription_id: :paramtype batch_ai_subscription_id: str :keyword batch_ai_resource_group: :paramtype batch_ai_resource_group: str :keyword batch_ai_workspace_name: :paramtype batch_ai_workspace_name: str :keyword cluster_name: :paramtype cluster_name: str :keyword native_shared_directory: :paramtype native_shared_directory: str """ super(BatchAiComputeInfo, self).__init__(**kwargs) self.batch_ai_subscription_id = kwargs.get('batch_ai_subscription_id', None) self.batch_ai_resource_group = kwargs.get('batch_ai_resource_group', None) self.batch_ai_workspace_name = kwargs.get('batch_ai_workspace_name', None) self.cluster_name = kwargs.get('cluster_name', None) self.native_shared_directory = kwargs.get('native_shared_directory', None) class BatchDataInput(msrest.serialization.Model): """BatchDataInput. 
    Reference to batch input data: a URI plus a type discriminator string.

    :ivar data_uri:
    :vartype data_uri: str
    :ivar type:
    :vartype type: str
    """

    _attribute_map = {
        'data_uri': {'key': 'dataUri', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_uri:
        :paramtype data_uri: str
        :keyword type:
        :paramtype type: str
        """
        super(BatchDataInput, self).__init__(**kwargs)
        self.data_uri = kwargs.get('data_uri', None)
        self.type = kwargs.get('type', None)


class BatchExportComponentSpecResponse(msrest.serialization.Model):
    """BatchExportComponentSpecResponse.

    Per-item component-spec results paired with any errors from a batch export.

    :ivar component_spec_meta_infos:
    :vartype component_spec_meta_infos: list[~flow.models.ComponentSpecMetaInfo]
    :ivar errors:
    :vartype errors: list[~flow.models.ErrorResponse]
    """

    _attribute_map = {
        'component_spec_meta_infos': {'key': 'componentSpecMetaInfos', 'type': '[ComponentSpecMetaInfo]'},
        'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword component_spec_meta_infos:
        :paramtype component_spec_meta_infos: list[~flow.models.ComponentSpecMetaInfo]
        :keyword errors:
        :paramtype errors: list[~flow.models.ErrorResponse]
        """
        super(BatchExportComponentSpecResponse, self).__init__(**kwargs)
        self.component_spec_meta_infos = kwargs.get('component_spec_meta_infos', None)
        self.errors = kwargs.get('errors', None)


class BatchExportRawComponentResponse(msrest.serialization.Model):
    """BatchExportRawComponentResponse.
    Per-item raw-component results paired with any errors from a batch export.

    :ivar raw_component_dtos:
    :vartype raw_component_dtos: list[~flow.models.RawComponentDto]
    :ivar errors:
    :vartype errors: list[~flow.models.ErrorResponse]
    """

    _attribute_map = {
        'raw_component_dtos': {'key': 'rawComponentDtos', 'type': '[RawComponentDto]'},
        'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword raw_component_dtos:
        :paramtype raw_component_dtos: list[~flow.models.RawComponentDto]
        :keyword errors:
        :paramtype errors: list[~flow.models.ErrorResponse]
        """
        super(BatchExportRawComponentResponse, self).__init__(**kwargs)
        self.raw_component_dtos = kwargs.get('raw_component_dtos', None)
        self.errors = kwargs.get('errors', None)


class BatchGetComponentHashesRequest(msrest.serialization.Model):
    """BatchGetComponentHashesRequest.

    :ivar module_hash_version: Possible values include: "IdentifierHash", "IdentifierHashV2".
    :vartype module_hash_version: str or ~flow.models.AetherModuleHashVersion
    :ivar module_entities: Dictionary of :code:`<AetherModuleEntity>`.
    :vartype module_entities: dict[str, ~flow.models.AetherModuleEntity]
    """

    _attribute_map = {
        'module_hash_version': {'key': 'moduleHashVersion', 'type': 'str'},
        'module_entities': {'key': 'moduleEntities', 'type': '{AetherModuleEntity}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword module_hash_version: Possible values include: "IdentifierHash", "IdentifierHashV2".
        :paramtype module_hash_version: str or ~flow.models.AetherModuleHashVersion
        :keyword module_entities: Dictionary of :code:`<AetherModuleEntity>`.
        :paramtype module_entities: dict[str, ~flow.models.AetherModuleEntity]
        """
        super(BatchGetComponentHashesRequest, self).__init__(**kwargs)
        self.module_hash_version = kwargs.get('module_hash_version', None)
        self.module_entities = kwargs.get('module_entities', None)


class BatchGetComponentRequest(msrest.serialization.Model):
    """BatchGetComponentRequest.
    Components may be requested either by version id or by name+version pair;
    both lists are optional.

    :ivar version_ids:
    :vartype version_ids: list[str]
    :ivar name_and_versions:
    :vartype name_and_versions: list[~flow.models.ComponentNameMetaInfo]
    """

    _attribute_map = {
        'version_ids': {'key': 'versionIds', 'type': '[str]'},
        'name_and_versions': {'key': 'nameAndVersions', 'type': '[ComponentNameMetaInfo]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword version_ids:
        :paramtype version_ids: list[str]
        :keyword name_and_versions:
        :paramtype name_and_versions: list[~flow.models.ComponentNameMetaInfo]
        """
        super(BatchGetComponentRequest, self).__init__(**kwargs)
        self.version_ids = kwargs.get('version_ids', None)
        self.name_and_versions = kwargs.get('name_and_versions', None)


class Binding(msrest.serialization.Model):
    """Binding.

    :ivar binding_type: The only acceptable values to pass in are None and "Basic". The default
     value is None.
    :vartype binding_type: str
    """

    _attribute_map = {
        'binding_type': {'key': 'bindingType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword binding_type: The only acceptable values to pass in are None and "Basic". The
         default value is None.
        :paramtype binding_type: str
        """
        super(Binding, self).__init__(**kwargs)
        self.binding_type = kwargs.get('binding_type', None)


class BulkTestDto(msrest.serialization.Model):
    """BulkTestDto.

    :ivar bulk_test_id:
    :vartype bulk_test_id: str
    :ivar display_name:
    :vartype display_name: str
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar runtime:
    :vartype runtime: str
    :ivar created_by:
    :vartype created_by: ~flow.models.SchemaContractsCreatedBy
    :ivar created_on:
    :vartype created_on: ~datetime.datetime
    :ivar evaluation_count:
    :vartype evaluation_count: int
    :ivar variant_count:
    :vartype variant_count: int
    :ivar flow_submit_run_settings:
    :vartype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
    :ivar inputs: This is a dictionary.
    :vartype inputs: dict[str, ~flow.models.FlowInputDefinition]
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, ~flow.models.FlowOutputDefinition]
    :ivar batch_inputs:
    :vartype batch_inputs: list[dict[str, any]]
    :ivar batch_data_input:
    :vartype batch_data_input: ~flow.models.BatchDataInput
    """

    _attribute_map = {
        'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'runtime': {'key': 'runtime', 'type': 'str'},
        'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'evaluation_count': {'key': 'evaluationCount', 'type': 'int'},
        'variant_count': {'key': 'variantCount', 'type': 'int'},
        'flow_submit_run_settings': {'key': 'flowSubmitRunSettings', 'type': 'FlowSubmitRunSettings'},
        'inputs': {'key': 'inputs', 'type': '{FlowInputDefinition}'},
        'outputs': {'key': 'outputs', 'type': '{FlowOutputDefinition}'},
        # NOTE(review): wire key is snake_case ('batch_inputs') unlike the
        # camelCase keys above (e.g. 'batchDataInput' below) — presumably this
        # matches the service contract; confirm before normalizing.
        'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
        'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword bulk_test_id:
        :paramtype bulk_test_id: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword runtime:
        :paramtype runtime: str
        :keyword created_by:
        :paramtype created_by: ~flow.models.SchemaContractsCreatedBy
        :keyword created_on:
        :paramtype created_on: ~datetime.datetime
        :keyword evaluation_count:
        :paramtype evaluation_count: int
        :keyword variant_count:
        :paramtype variant_count: int
        :keyword flow_submit_run_settings:
        :paramtype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
        :keyword inputs: This is a dictionary.
        :paramtype inputs: dict[str, ~flow.models.FlowInputDefinition]
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, ~flow.models.FlowOutputDefinition]
        :keyword batch_inputs:
        :paramtype batch_inputs: list[dict[str, any]]
        :keyword batch_data_input:
        :paramtype batch_data_input: ~flow.models.BatchDataInput
        """
        super(BulkTestDto, self).__init__(**kwargs)
        self.bulk_test_id = kwargs.get('bulk_test_id', None)
        self.display_name = kwargs.get('display_name', None)
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.runtime = kwargs.get('runtime', None)
        self.created_by = kwargs.get('created_by', None)
        self.created_on = kwargs.get('created_on', None)
        self.evaluation_count = kwargs.get('evaluation_count', None)
        self.variant_count = kwargs.get('variant_count', None)
        self.flow_submit_run_settings = kwargs.get('flow_submit_run_settings', None)
        self.inputs = kwargs.get('inputs', None)
        self.outputs = kwargs.get('outputs', None)
        self.batch_inputs = kwargs.get('batch_inputs', None)
        self.batch_data_input = kwargs.get('batch_data_input', None)


class CloudError(msrest.serialization.Model):
    """CloudError.

    Variables are only populated by the server, and will be ignored when sending a request.
    :ivar code:
    :vartype code: str
    :ivar message:
    :vartype message: str
    :ivar target:
    :vartype target: str
    :ivar details:
    :vartype details: list[~flow.models.CloudError]
    :ivar additional_info:
    :vartype additional_info: list[~flow.models.AdditionalErrorInfo]
    """

    # details / additional_info are marked read-only: they are populated by the
    # server and deliberately not settable through __init__ (see below, where
    # they are initialized to None rather than read from kwargs).
    _validation = {
        'details': {'readonly': True},
        'additional_info': {'readonly': True},
    }

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'details': {'key': 'details', 'type': '[CloudError]'},
        'additional_info': {'key': 'additionalInfo', 'type': '[AdditionalErrorInfo]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword code:
        :paramtype code: str
        :keyword message:
        :paramtype message: str
        :keyword target:
        :paramtype target: str
        """
        super(CloudError, self).__init__(**kwargs)
        self.code = kwargs.get('code', None)
        self.message = kwargs.get('message', None)
        self.target = kwargs.get('target', None)
        self.details = None
        self.additional_info = None


class CloudPrioritySetting(msrest.serialization.Model):
    """CloudPrioritySetting.
    Per-compute-target priority configurations (Scope / AmlCompute / ITP /
    Singularity).

    :ivar scope_priority:
    :vartype scope_priority: ~flow.models.PriorityConfiguration
    :ivar aml_compute_priority:
    :vartype aml_compute_priority: ~flow.models.PriorityConfiguration
    :ivar itp_priority:
    :vartype itp_priority: ~flow.models.PriorityConfiguration
    :ivar singularity_priority:
    :vartype singularity_priority: ~flow.models.PriorityConfiguration
    """

    _attribute_map = {
        # NOTE(review): only 'scopePriority' is camelCase; the other three wire
        # keys are PascalCase — presumably mirroring the service contract;
        # confirm against the swagger before normalizing.
        'scope_priority': {'key': 'scopePriority', 'type': 'PriorityConfiguration'},
        'aml_compute_priority': {'key': 'AmlComputePriority', 'type': 'PriorityConfiguration'},
        'itp_priority': {'key': 'ItpPriority', 'type': 'PriorityConfiguration'},
        'singularity_priority': {'key': 'SingularityPriority', 'type': 'PriorityConfiguration'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword scope_priority:
        :paramtype scope_priority: ~flow.models.PriorityConfiguration
        :keyword aml_compute_priority:
        :paramtype aml_compute_priority: ~flow.models.PriorityConfiguration
        :keyword itp_priority:
        :paramtype itp_priority: ~flow.models.PriorityConfiguration
        :keyword singularity_priority:
        :paramtype singularity_priority: ~flow.models.PriorityConfiguration
        """
        super(CloudPrioritySetting, self).__init__(**kwargs)
        self.scope_priority = kwargs.get('scope_priority', None)
        self.aml_compute_priority = kwargs.get('aml_compute_priority', None)
        self.itp_priority = kwargs.get('itp_priority', None)
        self.singularity_priority = kwargs.get('singularity_priority', None)


class CloudSettings(msrest.serialization.Model):
    """CloudSettings.
    Aggregate of per-backend cloud configurations (HDI, Scope, ES, data
    transfer, AML Spark, AutoML, etc.) attached to a run.

    :ivar linked_settings:
    :vartype linked_settings: list[~flow.models.ParameterAssignment]
    :ivar priority_config:
    :vartype priority_config: ~flow.models.PriorityConfiguration
    :ivar hdi_run_config:
    :vartype hdi_run_config: ~flow.models.HdiRunConfiguration
    :ivar sub_graph_config:
    :vartype sub_graph_config: ~flow.models.SubGraphConfiguration
    :ivar auto_ml_component_config:
    :vartype auto_ml_component_config: ~flow.models.AutoMLComponentConfiguration
    :ivar ap_cloud_config:
    :vartype ap_cloud_config: ~flow.models.APCloudConfiguration
    :ivar scope_cloud_config:
    :vartype scope_cloud_config: ~flow.models.ScopeCloudConfiguration
    :ivar es_cloud_config:
    :vartype es_cloud_config: ~flow.models.EsCloudConfiguration
    :ivar data_transfer_cloud_config:
    :vartype data_transfer_cloud_config: ~flow.models.DataTransferCloudConfiguration
    :ivar aml_spark_cloud_setting:
    :vartype aml_spark_cloud_setting: ~flow.models.AmlSparkCloudSetting
    :ivar data_transfer_v2_cloud_setting:
    :vartype data_transfer_v2_cloud_setting: ~flow.models.DataTransferV2CloudSetting
    """

    _attribute_map = {
        'linked_settings': {'key': 'linkedSettings', 'type': '[ParameterAssignment]'},
        'priority_config': {'key': 'priorityConfig', 'type': 'PriorityConfiguration'},
        'hdi_run_config': {'key': 'hdiRunConfig', 'type': 'HdiRunConfiguration'},
        'sub_graph_config': {'key': 'subGraphConfig', 'type': 'SubGraphConfiguration'},
        'auto_ml_component_config': {'key': 'autoMLComponentConfig', 'type': 'AutoMLComponentConfiguration'},
        'ap_cloud_config': {'key': 'apCloudConfig', 'type': 'APCloudConfiguration'},
        'scope_cloud_config': {'key': 'scopeCloudConfig', 'type': 'ScopeCloudConfiguration'},
        'es_cloud_config': {'key': 'esCloudConfig', 'type': 'EsCloudConfiguration'},
        'data_transfer_cloud_config': {'key': 'dataTransferCloudConfig', 'type': 'DataTransferCloudConfiguration'},
        'aml_spark_cloud_setting': {'key': 'amlSparkCloudSetting', 'type': 'AmlSparkCloudSetting'},
        'data_transfer_v2_cloud_setting': {'key': 'dataTransferV2CloudSetting', 'type': 'DataTransferV2CloudSetting'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword linked_settings:
        :paramtype linked_settings: list[~flow.models.ParameterAssignment]
        :keyword priority_config:
        :paramtype priority_config: ~flow.models.PriorityConfiguration
        :keyword hdi_run_config:
        :paramtype hdi_run_config: ~flow.models.HdiRunConfiguration
        :keyword sub_graph_config:
        :paramtype sub_graph_config: ~flow.models.SubGraphConfiguration
        :keyword auto_ml_component_config:
        :paramtype auto_ml_component_config: ~flow.models.AutoMLComponentConfiguration
        :keyword ap_cloud_config:
        :paramtype ap_cloud_config: ~flow.models.APCloudConfiguration
        :keyword scope_cloud_config:
        :paramtype scope_cloud_config: ~flow.models.ScopeCloudConfiguration
        :keyword es_cloud_config:
        :paramtype es_cloud_config: ~flow.models.EsCloudConfiguration
        :keyword data_transfer_cloud_config:
        :paramtype data_transfer_cloud_config: ~flow.models.DataTransferCloudConfiguration
        :keyword aml_spark_cloud_setting:
        :paramtype aml_spark_cloud_setting: ~flow.models.AmlSparkCloudSetting
        :keyword data_transfer_v2_cloud_setting:
        :paramtype data_transfer_v2_cloud_setting: ~flow.models.DataTransferV2CloudSetting
        """
        super(CloudSettings, self).__init__(**kwargs)
        self.linked_settings = kwargs.get('linked_settings', None)
        self.priority_config = kwargs.get('priority_config', None)
        self.hdi_run_config = kwargs.get('hdi_run_config', None)
        self.sub_graph_config = kwargs.get('sub_graph_config', None)
        self.auto_ml_component_config = kwargs.get('auto_ml_component_config', None)
        self.ap_cloud_config = kwargs.get('ap_cloud_config', None)
        self.scope_cloud_config = kwargs.get('scope_cloud_config', None)
        self.es_cloud_config = kwargs.get('es_cloud_config', None)
        self.data_transfer_cloud_config = kwargs.get('data_transfer_cloud_config', None)
        self.aml_spark_cloud_setting = kwargs.get('aml_spark_cloud_setting', None)
        self.data_transfer_v2_cloud_setting = kwargs.get('data_transfer_v2_cloud_setting', None)


class ColumnTransformer(msrest.serialization.Model):
    """ColumnTransformer.

    A transformer applied to the named columns with arbitrary parameters.

    :ivar fields:
    :vartype fields: list[str]
    :ivar parameters: Anything.
    :vartype parameters: any
    """

    _attribute_map = {
        'fields': {'key': 'fields', 'type': '[str]'},
        'parameters': {'key': 'parameters', 'type': 'object'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword fields:
        :paramtype fields: list[str]
        :keyword parameters: Anything.
        :paramtype parameters: any
        """
        super(ColumnTransformer, self).__init__(**kwargs)
        self.fields = kwargs.get('fields', None)
        self.parameters = kwargs.get('parameters', None)


class CommandJob(msrest.serialization.Model):
    """CommandJob.

    :ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
     "AutoML", "Spark", "Base".
    :vartype job_type: str or ~flow.models.JobType
    :ivar code_id:
    :vartype code_id: str
    :ivar command:
    :vartype command: str
    :ivar environment_id:
    :vartype environment_id: str
    :ivar input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
    :vartype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
    :ivar output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
    :vartype output_data_bindings: dict[str, ~flow.models.OutputDataBinding]
    :ivar distribution:
    :vartype distribution: ~flow.models.DistributionConfiguration
    :ivar environment_variables: Dictionary of :code:`<string>`.
    :vartype environment_variables: dict[str, str]
    :ivar parameters: Dictionary of :code:`<any>`.
    :vartype parameters: dict[str, any]
    :ivar autologger_settings:
    :vartype autologger_settings: ~flow.models.MfeInternalAutologgerSettings
    :ivar limits:
    :vartype limits: ~flow.models.CommandJobLimits
    :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
     "InProgress".
    :vartype provisioning_state: str or ~flow.models.JobProvisioningState
    :ivar parent_job_name:
    :vartype parent_job_name: str
    :ivar display_name:
    :vartype display_name: str
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing",
     "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled",
     "NotResponding", "Paused", "Unknown", "Scheduled".
    :vartype status: str or ~flow.models.JobStatus
    :ivar interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
    :vartype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
    :ivar identity:
    :vartype identity: ~flow.models.MfeInternalIdentityConfiguration
    :ivar compute:
    :vartype compute: ~flow.models.ComputeConfiguration
    :ivar priority:
    :vartype priority: int
    :ivar output:
    :vartype output: ~flow.models.JobOutputArtifacts
    :ivar is_archived:
    :vartype is_archived: bool
    :ivar schedule:
    :vartype schedule: ~flow.models.ScheduleBase
    :ivar component_id:
    :vartype component_id: str
    :ivar notification_setting:
    :vartype notification_setting: ~flow.models.NotificationSetting
    :ivar secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
    :vartype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    """

    # 'command', when provided, must be a non-empty string (min_length 1).
    _validation = {
        'command': {'min_length': 1},
    }

    _attribute_map = {
        'job_type': {'key': 'jobType', 'type': 'str'},
        'code_id': {'key': 'codeId', 'type': 'str'},
        'command': {'key': 'command', 'type': 'str'},
        'environment_id': {'key': 'environmentId', 'type': 'str'},
        'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'},
        'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'},
        'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'parameters': {'key': 'parameters', 'type': '{object}'},
        'autologger_settings': {'key': 'autologgerSettings', 'type': 'MfeInternalAutologgerSettings'},
        'limits': {'key': 'limits', 'type': 'CommandJobLimits'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'parent_job_name': {'key': 'parentJobName', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
        'identity': {'key': 'identity', 'type': 'MfeInternalIdentityConfiguration'},
        'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
        'priority': {'key': 'priority', 'type': 'int'},
        'output': {'key': 'output', 'type': 'JobOutputArtifacts'},
        'is_archived': {'key': 'isArchived', 'type': 'bool'},
        'schedule': {'key': 'schedule', 'type': 'ScheduleBase'},
        'component_id': {'key': 'componentId', 'type': 'str'},
        'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'},
        'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{MfeInternalSecretConfiguration}'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline",
         "Data", "AutoML", "Spark", "Base".
        :paramtype job_type: str or ~flow.models.JobType
        :keyword code_id:
        :paramtype code_id: str
        :keyword command:
        :paramtype command: str
        :keyword environment_id:
        :paramtype environment_id: str
        :keyword input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
        :paramtype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
        :keyword output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
        :paramtype output_data_bindings: dict[str, ~flow.models.OutputDataBinding]
        :keyword distribution:
        :paramtype distribution: ~flow.models.DistributionConfiguration
        :keyword environment_variables: Dictionary of :code:`<string>`.
        :paramtype environment_variables: dict[str, str]
        :keyword parameters: Dictionary of :code:`<any>`.
        :paramtype parameters: dict[str, any]
        :keyword autologger_settings:
        :paramtype autologger_settings: ~flow.models.MfeInternalAutologgerSettings
        :keyword limits:
        :paramtype limits: ~flow.models.CommandJobLimits
        :keyword provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
         "InProgress".
        :paramtype provisioning_state: str or ~flow.models.JobProvisioningState
        :keyword parent_job_name:
        :paramtype parent_job_name: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword status: Possible values include: "NotStarted", "Starting", "Provisioning",
         "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed",
         "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled".
        :paramtype status: str or ~flow.models.JobStatus
        :keyword interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
        :paramtype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
        :keyword identity:
        :paramtype identity: ~flow.models.MfeInternalIdentityConfiguration
        :keyword compute:
        :paramtype compute: ~flow.models.ComputeConfiguration
        :keyword priority:
        :paramtype priority: int
        :keyword output:
        :paramtype output: ~flow.models.JobOutputArtifacts
        :keyword is_archived:
        :paramtype is_archived: bool
        :keyword schedule:
        :paramtype schedule: ~flow.models.ScheduleBase
        :keyword component_id:
        :paramtype component_id: str
        :keyword notification_setting:
        :paramtype notification_setting: ~flow.models.NotificationSetting
        :keyword secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
        :paramtype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        """
        super(CommandJob, self).__init__(**kwargs)
        self.job_type = kwargs.get('job_type', None)
        self.code_id = kwargs.get('code_id', None)
        self.command = kwargs.get('command', None)
        self.environment_id = kwargs.get('environment_id', None)
        self.input_data_bindings = kwargs.get('input_data_bindings', None)
        self.output_data_bindings = kwargs.get('output_data_bindings', None)
        self.distribution = kwargs.get('distribution', None)
        self.environment_variables = kwargs.get('environment_variables', None)
        self.parameters = kwargs.get('parameters', None)
        self.autologger_settings = kwargs.get('autologger_settings', None)
        self.limits = kwargs.get('limits', None)
        self.provisioning_state = kwargs.get('provisioning_state', None)
        self.parent_job_name = kwargs.get('parent_job_name', None)
        self.display_name = kwargs.get('display_name', None)
        self.experiment_name = kwargs.get('experiment_name', None)
        self.status = kwargs.get('status', None)
        self.interaction_endpoints = kwargs.get('interaction_endpoints', None)
        self.identity = kwargs.get('identity', None)
        self.compute = kwargs.get('compute', None)
        self.priority = kwargs.get('priority', None)
        self.output = kwargs.get('output', None)
        self.is_archived = kwargs.get('is_archived', None)
        self.schedule = kwargs.get('schedule', None)
        self.component_id = kwargs.get('component_id', None)
        self.notification_setting = kwargs.get('notification_setting', None)
        self.secrets_configuration = kwargs.get('secrets_configuration', None)
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)


class CommandJobLimits(msrest.serialization.Model):
    """CommandJobLimits.

    :ivar job_limits_type: Possible values include: "Command", "Sweep".
    :vartype job_limits_type: str or ~flow.models.JobLimitsType
    :ivar timeout:
    :vartype timeout: str
    """

    _attribute_map = {
        'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'},
        'timeout': {'key': 'timeout', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword job_limits_type: Possible values include: "Command", "Sweep".
        :paramtype job_limits_type: str or ~flow.models.JobLimitsType
        :keyword timeout:
        :paramtype timeout: str
        """
        super(CommandJobLimits, self).__init__(**kwargs)
        self.job_limits_type = kwargs.get('job_limits_type', None)
        self.timeout = kwargs.get('timeout', None)


class CommandReturnCodeConfig(msrest.serialization.Model):
    """CommandReturnCodeConfig.

    :ivar return_code: Possible values include: "Zero", "ZeroOrGreater".
    :vartype return_code: str or ~flow.models.SuccessfulCommandReturnCode
    :ivar successful_return_codes:
    :vartype successful_return_codes: list[int]
    """

    _attribute_map = {
        'return_code': {'key': 'returnCode', 'type': 'str'},
        'successful_return_codes': {'key': 'successfulReturnCodes', 'type': '[int]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword return_code: Possible values include: "Zero", "ZeroOrGreater".
        :paramtype return_code: str or ~flow.models.SuccessfulCommandReturnCode
        :keyword successful_return_codes:
        :paramtype successful_return_codes: list[int]
        """
        super(CommandReturnCodeConfig, self).__init__(**kwargs)
        self.return_code = kwargs.get('return_code', None)
        self.successful_return_codes = kwargs.get('successful_return_codes', None)


class ComponentConfiguration(msrest.serialization.Model):
    """ComponentConfiguration.

    :ivar component_identifier:
    :vartype component_identifier: str
    """

    _attribute_map = {
        'component_identifier': {'key': 'componentIdentifier', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword component_identifier:
        :paramtype component_identifier: str
        """
        super(ComponentConfiguration, self).__init__(**kwargs)
        self.component_identifier = kwargs.get('component_identifier', None)


class ComponentInput(msrest.serialization.Model):
    """ComponentInput.

    Declaration of a single component input: name, type, optionality, default
    value, and optional enum / min / max constraints (all carried as strings).

    :ivar name:
    :vartype name: str
    :ivar optional:
    :vartype optional: bool
    :ivar description:
    :vartype description: str
    :ivar type:
    :vartype type: str
    :ivar default:
    :vartype default: str
    :ivar enum:
    :vartype enum: list[str]
    :ivar min:
    :vartype min: str
    :ivar max:
    :vartype max: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'optional': {'key': 'optional', 'type': 'bool'},
        'description': {'key': 'description', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'default': {'key': 'default', 'type': 'str'},
        'enum': {'key': 'enum', 'type': '[str]'},
        'min': {'key': 'min', 'type': 'str'},
        'max': {'key': 'max', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword optional:
        :paramtype optional: bool
        :keyword description:
        :paramtype description: str
        :keyword type:
        :paramtype type: str
        :keyword default:
        :paramtype default: str
        :keyword enum:
        :paramtype enum: list[str]
        :keyword min:
        :paramtype min: str
        :keyword max:
        :paramtype max: str
        """
        super(ComponentInput, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.optional = kwargs.get('optional', None)
        self.description = kwargs.get('description', None)
        self.type = kwargs.get('type', None)
        self.default = kwargs.get('default', None)
        self.enum = kwargs.get('enum', None)
        self.min = kwargs.get('min', None)
        self.max = kwargs.get('max', None)


class ComponentJob(msrest.serialization.Model):
    """ComponentJob.

    :ivar compute:
    :vartype compute: ~flow.models.ComputeConfiguration
    :ivar component_id:
    :vartype component_id: str
    :ivar inputs: This is a dictionary.
    :vartype inputs: dict[str, ~flow.models.ComponentJobInput]
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, ~flow.models.ComponentJobOutput]
    """

    _attribute_map = {
        'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
        'component_id': {'key': 'componentId', 'type': 'str'},
        'inputs': {'key': 'inputs', 'type': '{ComponentJobInput}'},
        'outputs': {'key': 'outputs', 'type': '{ComponentJobOutput}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword compute:
        :paramtype compute: ~flow.models.ComputeConfiguration
        :keyword component_id:
        :paramtype component_id: str
        :keyword inputs: This is a dictionary.
        :paramtype inputs: dict[str, ~flow.models.ComponentJobInput]
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, ~flow.models.ComponentJobOutput]
        """
        super(ComponentJob, self).__init__(**kwargs)
        self.compute = kwargs.get('compute', None)
        self.component_id = kwargs.get('component_id', None)
        self.inputs = kwargs.get('inputs', None)
        self.outputs = kwargs.get('outputs', None)


class ComponentJobInput(msrest.serialization.Model):
    """ComponentJobInput.
    A job input: the data reference plus an input-binding expression.

    :ivar data:
    :vartype data: ~flow.models.InputData
    :ivar input_binding:
    :vartype input_binding: str
    """

    _attribute_map = {
        'data': {'key': 'data', 'type': 'InputData'},
        'input_binding': {'key': 'inputBinding', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data:
        :paramtype data: ~flow.models.InputData
        :keyword input_binding:
        :paramtype input_binding: str
        """
        super(ComponentJobInput, self).__init__(**kwargs)
        self.data = kwargs.get('data', None)
        self.input_binding = kwargs.get('input_binding', None)


class ComponentJobOutput(msrest.serialization.Model):
    """ComponentJobOutput.

    A job output: the data reference plus an output-binding expression.

    :ivar data:
    :vartype data: ~flow.models.MfeInternalOutputData
    :ivar output_binding:
    :vartype output_binding: str
    """

    _attribute_map = {
        'data': {'key': 'data', 'type': 'MfeInternalOutputData'},
        'output_binding': {'key': 'outputBinding', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data:
        :paramtype data: ~flow.models.MfeInternalOutputData
        :keyword output_binding:
        :paramtype output_binding: str
        """
        super(ComponentJobOutput, self).__init__(**kwargs)
        self.data = kwargs.get('data', None)
        self.output_binding = kwargs.get('output_binding', None)


class ComponentNameAndDefaultVersion(msrest.serialization.Model):
    """ComponentNameAndDefaultVersion.
    :ivar component_name:
    :vartype component_name: str
    :ivar version:
    :vartype version: str
    :ivar feed_name:
    :vartype feed_name: str
    :ivar registry_name:
    :vartype registry_name: str
    """

    _attribute_map = {
        'component_name': {'key': 'componentName', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'feed_name': {'key': 'feedName', 'type': 'str'},
        'registry_name': {'key': 'registryName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword component_name:
        :paramtype component_name: str
        :keyword version:
        :paramtype version: str
        :keyword feed_name:
        :paramtype feed_name: str
        :keyword registry_name:
        :paramtype registry_name: str
        """
        super(ComponentNameAndDefaultVersion, self).__init__(**kwargs)
        self.component_name = kwargs.get('component_name', None)
        self.version = kwargs.get('version', None)
        self.feed_name = kwargs.get('feed_name', None)
        self.registry_name = kwargs.get('registry_name', None)


class ComponentNameMetaInfo(msrest.serialization.Model):
    """ComponentNameMetaInfo.

    Identifies a component by feed/registry plus name and version.

    :ivar feed_name:
    :vartype feed_name: str
    :ivar component_name:
    :vartype component_name: str
    :ivar component_version:
    :vartype component_version: str
    :ivar registry_name:
    :vartype registry_name: str
    """

    _attribute_map = {
        'feed_name': {'key': 'feedName', 'type': 'str'},
        'component_name': {'key': 'componentName', 'type': 'str'},
        'component_version': {'key': 'componentVersion', 'type': 'str'},
        'registry_name': {'key': 'registryName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword feed_name:
        :paramtype feed_name: str
        :keyword component_name:
        :paramtype component_name: str
        :keyword component_version:
        :paramtype component_version: str
        :keyword registry_name:
        :paramtype registry_name: str
        """
        super(ComponentNameMetaInfo, self).__init__(**kwargs)
        self.feed_name = kwargs.get('feed_name', None)
        self.component_name = kwargs.get('component_name', None)
        self.component_version = kwargs.get('component_version', None)
        self.registry_name = kwargs.get('registry_name', None)


class ComponentOutput(msrest.serialization.Model):
    """ComponentOutput.

    Declaration of a single component output: name, description, and type.

    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar type:
    :vartype type: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword type:
        :paramtype type: str
        """
        super(ComponentOutput, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.type = kwargs.get('type', None)


class ComponentPreflightResult(msrest.serialization.Model):
    """ComponentPreflightResult.

    :ivar error_details:
    :vartype error_details: list[~flow.models.RootError]
    """

    _attribute_map = {
        'error_details': {'key': 'errorDetails', 'type': '[RootError]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword error_details:
        :paramtype error_details: list[~flow.models.RootError]
        """
        super(ComponentPreflightResult, self).__init__(**kwargs)
        self.error_details = kwargs.get('error_details', None)


class ComponentSpecMetaInfo(msrest.serialization.Model):
    """ComponentSpecMetaInfo.

    :ivar component_spec: Anything.
    :vartype component_spec: any
    :ivar component_version:
    :vartype component_version: str
    :ivar is_anonymous:
    :vartype is_anonymous: bool
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar component_name:
    :vartype component_name: str
    :ivar description:
    :vartype description: str
    :ivar is_archived:
    :vartype is_archived: bool
    """

    # msrest serializer codes: 'object' = arbitrary JSON value, '{str}' = map of strings.
    _attribute_map = {
        'component_spec': {'key': 'componentSpec', 'type': 'object'},
        'component_version': {'key': 'componentVersion', 'type': 'str'},
        'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'component_name': {'key': 'componentName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'is_archived': {'key': 'isArchived', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword component_spec: Anything.
        :paramtype component_spec: any
        :keyword component_version:
        :paramtype component_version: str
        :keyword is_anonymous:
        :paramtype is_anonymous: bool
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword component_name:
        :paramtype component_name: str
        :keyword description:
        :paramtype description: str
        :keyword is_archived:
        :paramtype is_archived: bool
        """
        super(ComponentSpecMetaInfo, self).__init__(**kwargs)
        self.component_spec = kwargs.get('component_spec', None)
        self.component_version = kwargs.get('component_version', None)
        self.is_anonymous = kwargs.get('is_anonymous', None)
        self.properties = kwargs.get('properties', None)
        self.tags = kwargs.get('tags', None)
        self.component_name = kwargs.get('component_name', None)
        self.description = kwargs.get('description', None)
        self.is_archived = kwargs.get('is_archived', None)


class ComponentUpdateRequest(msrest.serialization.Model):
    """ComponentUpdateRequest.
    :ivar original_module_entity:
    :vartype original_module_entity: ~flow.models.ModuleEntity
    :ivar update_module_entity:
    :vartype update_module_entity: ~flow.models.ModuleEntity
    :ivar module_name:
    :vartype module_name: str
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar overwrite_with_original_name_and_version:
    :vartype overwrite_with_original_name_and_version: bool
    :ivar snapshot_id:
    :vartype snapshot_id: str
    """

    # Wire-format map: attribute name -> (JSON key, msrest serializer type).
    _attribute_map = {
        'original_module_entity': {'key': 'originalModuleEntity', 'type': 'ModuleEntity'},
        'update_module_entity': {'key': 'updateModuleEntity', 'type': 'ModuleEntity'},
        'module_name': {'key': 'moduleName', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'overwrite_with_original_name_and_version': {'key': 'overwriteWithOriginalNameAndVersion', 'type': 'bool'},
        'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword original_module_entity:
        :paramtype original_module_entity: ~flow.models.ModuleEntity
        :keyword update_module_entity:
        :paramtype update_module_entity: ~flow.models.ModuleEntity
        :keyword module_name:
        :paramtype module_name: str
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword overwrite_with_original_name_and_version:
        :paramtype overwrite_with_original_name_and_version: bool
        :keyword snapshot_id:
        :paramtype snapshot_id: str
        """
        super(ComponentUpdateRequest, self).__init__(**kwargs)
        self.original_module_entity = kwargs.get('original_module_entity', None)
        self.update_module_entity = kwargs.get('update_module_entity', None)
        self.module_name = kwargs.get('module_name', None)
        self.properties = kwargs.get('properties', None)
        self.overwrite_with_original_name_and_version = kwargs.get('overwrite_with_original_name_and_version', None)
        self.snapshot_id = kwargs.get('snapshot_id', None)


class ComponentValidationRequest(msrest.serialization.Model):
    """ComponentValidationRequest.
    :ivar component_identifier:
    :vartype component_identifier: str
    :ivar compute_identity:
    :vartype compute_identity: ~flow.models.ComputeIdentityDto
    :ivar execution_context_dto:
    :vartype execution_context_dto: ~flow.models.ExecutionContextDto
    :ivar environment_definition:
    :vartype environment_definition: ~flow.models.EnvironmentDefinitionDto
    :ivar data_port_dtos:
    :vartype data_port_dtos: list[~flow.models.DataPortDto]
    """

    # '[DataPortDto]' serializer code = list of DataPortDto models.
    _attribute_map = {
        'component_identifier': {'key': 'componentIdentifier', 'type': 'str'},
        'compute_identity': {'key': 'computeIdentity', 'type': 'ComputeIdentityDto'},
        'execution_context_dto': {'key': 'executionContextDto', 'type': 'ExecutionContextDto'},
        'environment_definition': {'key': 'environmentDefinition', 'type': 'EnvironmentDefinitionDto'},
        'data_port_dtos': {'key': 'dataPortDtos', 'type': '[DataPortDto]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword component_identifier:
        :paramtype component_identifier: str
        :keyword compute_identity:
        :paramtype compute_identity: ~flow.models.ComputeIdentityDto
        :keyword execution_context_dto:
        :paramtype execution_context_dto: ~flow.models.ExecutionContextDto
        :keyword environment_definition:
        :paramtype environment_definition: ~flow.models.EnvironmentDefinitionDto
        :keyword data_port_dtos:
        :paramtype data_port_dtos: list[~flow.models.DataPortDto]
        """
        super(ComponentValidationRequest, self).__init__(**kwargs)
        self.component_identifier = kwargs.get('component_identifier', None)
        self.compute_identity = kwargs.get('compute_identity', None)
        self.execution_context_dto = kwargs.get('execution_context_dto', None)
        self.environment_definition = kwargs.get('environment_definition', None)
        self.data_port_dtos = kwargs.get('data_port_dtos', None)


class ComponentValidationResponse(msrest.serialization.Model):
    """ComponentValidationResponse.

    :ivar status: Possible values include: "Succeeded", "Failed".
    :vartype status: str or ~flow.models.ValidationStatus
    :ivar error: The error response.
    :vartype error: ~flow.models.ErrorResponse
    """

    # Enum-valued 'status' is serialized as a plain string on the wire.
    _attribute_map = {
        'status': {'key': 'status', 'type': 'str'},
        'error': {'key': 'error', 'type': 'ErrorResponse'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword status: Possible values include: "Succeeded", "Failed".
        :paramtype status: str or ~flow.models.ValidationStatus
        :keyword error: The error response.
        :paramtype error: ~flow.models.ErrorResponse
        """
        super(ComponentValidationResponse, self).__init__(**kwargs)
        self.status = kwargs.get('status', None)
        self.error = kwargs.get('error', None)


class Compute(msrest.serialization.Model):
    """Compute.

    :ivar target:
    :vartype target: str
    :ivar target_type:
    :vartype target_type: str
    :ivar vm_size:
    :vartype vm_size: str
    :ivar instance_type:
    :vartype instance_type: str
    :ivar instance_count:
    :vartype instance_count: int
    :ivar gpu_count:
    :vartype gpu_count: int
    :ivar priority:
    :vartype priority: str
    :ivar region:
    :vartype region: str
    :ivar arm_id:
    :vartype arm_id: str
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    """

    # Wire-format map: attribute name -> (JSON key, msrest serializer type).
    _attribute_map = {
        'target': {'key': 'target', 'type': 'str'},
        'target_type': {'key': 'targetType', 'type': 'str'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
        'gpu_count': {'key': 'gpuCount', 'type': 'int'},
        'priority': {'key': 'priority', 'type': 'str'},
        'region': {'key': 'region', 'type': 'str'},
        'arm_id': {'key': 'armId', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword target:
        :paramtype target: str
        :keyword target_type:
        :paramtype target_type: str
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword instance_count:
        :paramtype instance_count: int
        :keyword gpu_count:
        :paramtype gpu_count: int
        :keyword priority:
        :paramtype priority: str
        :keyword region:
        :paramtype region: str
        :keyword arm_id:
        :paramtype arm_id: str
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        """
        super(Compute, self).__init__(**kwargs)
        self.target = kwargs.get('target', None)
        self.target_type = kwargs.get('target_type', None)
        self.vm_size = kwargs.get('vm_size', None)
        self.instance_type = kwargs.get('instance_type', None)
        self.instance_count = kwargs.get('instance_count', None)
        self.gpu_count = kwargs.get('gpu_count', None)
        self.priority = kwargs.get('priority', None)
        self.region = kwargs.get('region', None)
        self.arm_id = kwargs.get('arm_id', None)
        self.properties = kwargs.get('properties', None)


class ComputeConfiguration(msrest.serialization.Model):
    """ComputeConfiguration.
    :ivar target:
    :vartype target: str
    :ivar instance_count:
    :vartype instance_count: int
    :ivar max_instance_count:
    :vartype max_instance_count: int
    :ivar is_local:
    :vartype is_local: bool
    :ivar location:
    :vartype location: str
    :ivar is_clusterless:
    :vartype is_clusterless: bool
    :ivar instance_type:
    :vartype instance_type: str
    :ivar instance_priority:
    :vartype instance_priority: str
    :ivar job_priority:
    :vartype job_priority: int
    :ivar shm_size:
    :vartype shm_size: str
    :ivar docker_args:
    :vartype docker_args: str
    :ivar locations:
    :vartype locations: list[str]
    :ivar properties: Dictionary of :code:`<any>`.
    :vartype properties: dict[str, any]
    """

    _attribute_map = {
        'target': {'key': 'target', 'type': 'str'},
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
        'max_instance_count': {'key': 'maxInstanceCount', 'type': 'int'},
        'is_local': {'key': 'isLocal', 'type': 'bool'},
        'location': {'key': 'location', 'type': 'str'},
        'is_clusterless': {'key': 'isClusterless', 'type': 'bool'},
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'instance_priority': {'key': 'instancePriority', 'type': 'str'},
        'job_priority': {'key': 'jobPriority', 'type': 'int'},
        'shm_size': {'key': 'shmSize', 'type': 'str'},
        'docker_args': {'key': 'dockerArgs', 'type': 'str'},
        'locations': {'key': 'locations', 'type': '[str]'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword target:
        :paramtype target: str
        :keyword instance_count:
        :paramtype instance_count: int
        :keyword max_instance_count:
        :paramtype max_instance_count: int
        :keyword is_local:
        :paramtype is_local: bool
        :keyword location:
        :paramtype location: str
        :keyword is_clusterless:
        :paramtype is_clusterless: bool
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword instance_priority:
        :paramtype instance_priority: str
        :keyword job_priority:
        :paramtype job_priority: int
        :keyword shm_size:
        :paramtype shm_size: str
        :keyword docker_args:
        :paramtype docker_args: str
        :keyword locations:
        :paramtype locations: list[str]
        :keyword properties: Dictionary of :code:`<any>`.
        :paramtype properties: dict[str, any]
        """
        super(ComputeConfiguration, self).__init__(**kwargs)
        self.target = kwargs.get('target', None)
        self.instance_count = kwargs.get('instance_count', None)
        self.max_instance_count = kwargs.get('max_instance_count', None)
        self.is_local = kwargs.get('is_local', None)
        self.location = kwargs.get('location', None)
        self.is_clusterless = kwargs.get('is_clusterless', None)
        self.instance_type = kwargs.get('instance_type', None)
        self.instance_priority = kwargs.get('instance_priority', None)
        self.job_priority = kwargs.get('job_priority', None)
        self.shm_size = kwargs.get('shm_size', None)
        self.docker_args = kwargs.get('docker_args', None)
        self.locations = kwargs.get('locations', None)
        self.properties = kwargs.get('properties', None)


class ComputeContract(msrest.serialization.Model):
    """ComputeContract.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id:
    :vartype id: str
    :ivar name:
    :vartype name: str
    :ivar type:
    :vartype type: str
    :ivar location:
    :vartype location: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar identity:
    :vartype identity: ~flow.models.ComputeIdentityContract
    :ivar properties:
    :vartype properties: ~flow.models.ComputeProperties
    """

    # 'type' is server-populated (readonly): never sent on requests, so
    # __init__ takes no keyword for it.
    _validation = {
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'identity': {'key': 'identity', 'type': 'ComputeIdentityContract'},
        'properties': {'key': 'properties', 'type': 'ComputeProperties'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword name:
        :paramtype name: str
        :keyword location:
        :paramtype location: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword identity:
        :paramtype identity: ~flow.models.ComputeIdentityContract
        :keyword properties:
        :paramtype properties: ~flow.models.ComputeProperties
        """
        super(ComputeContract, self).__init__(**kwargs)
        self.id = kwargs.get('id', None)
        self.name = kwargs.get('name', None)
        # Read-only field: starts as None and is only filled in by deserialization.
        self.type = None
        self.location = kwargs.get('location', None)
        self.tags = kwargs.get('tags', None)
        self.identity = kwargs.get('identity', None)
        self.properties = kwargs.get('properties', None)


class ComputeIdentityContract(msrest.serialization.Model):
    """ComputeIdentityContract.

    :ivar type:
    :vartype type: str
    :ivar system_identity_url:
    :vartype system_identity_url: str
    :ivar principal_id:
    :vartype principal_id: str
    :ivar tenant_id:
    :vartype tenant_id: str
    :ivar client_id:
    :vartype client_id: str
    :ivar client_secret_url:
    :vartype client_secret_url: str
    :ivar user_assigned_identities: This is a dictionary.
    :vartype user_assigned_identities: dict[str, ~flow.models.ComputeRPUserAssignedIdentity]
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'system_identity_url': {'key': 'systemIdentityUrl', 'type': 'str'},
        'principal_id': {'key': 'principalId', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'client_id': {'key': 'clientId', 'type': 'str'},
        'client_secret_url': {'key': 'clientSecretUrl', 'type': 'str'},
        'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{ComputeRPUserAssignedIdentity}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword type:
        :paramtype type: str
        :keyword system_identity_url:
        :paramtype system_identity_url: str
        :keyword principal_id:
        :paramtype principal_id: str
        :keyword tenant_id:
        :paramtype tenant_id: str
        :keyword client_id:
        :paramtype client_id: str
        :keyword client_secret_url:
        :paramtype client_secret_url: str
        :keyword user_assigned_identities: This is a dictionary.
        :paramtype user_assigned_identities: dict[str, ~flow.models.ComputeRPUserAssignedIdentity]
        """
        super(ComputeIdentityContract, self).__init__(**kwargs)
        self.type = kwargs.get('type', None)
        self.system_identity_url = kwargs.get('system_identity_url', None)
        self.principal_id = kwargs.get('principal_id', None)
        self.tenant_id = kwargs.get('tenant_id', None)
        self.client_id = kwargs.get('client_id', None)
        self.client_secret_url = kwargs.get('client_secret_url', None)
        self.user_assigned_identities = kwargs.get('user_assigned_identities', None)


# Autorest-generated msrest model; enum-typed field is carried as a plain string.
class ComputeIdentityDto(msrest.serialization.Model):
    """ComputeIdentityDto.

    :ivar compute_name:
    :vartype compute_name: str
    :ivar compute_target_type: Possible values include: "Local", "Remote", "HdiCluster",
     "ContainerInstance", "AmlCompute", "ComputeInstance", "Cmk8s", "SynapseSpark", "Kubernetes",
     "Aisc", "GlobalJobDispatcher", "Databricks", "MockedCompute".
    :vartype compute_target_type: str or ~flow.models.ComputeTargetType
    :ivar intellectual_property_publisher:
    :vartype intellectual_property_publisher: str
    """

    _attribute_map = {
        'compute_name': {'key': 'computeName', 'type': 'str'},
        'compute_target_type': {'key': 'computeTargetType', 'type': 'str'},
        'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword compute_name:
        :paramtype compute_name: str
        :keyword compute_target_type: Possible values include: "Local", "Remote", "HdiCluster",
         "ContainerInstance", "AmlCompute", "ComputeInstance", "Cmk8s", "SynapseSpark", "Kubernetes",
         "Aisc", "GlobalJobDispatcher", "Databricks", "MockedCompute".
        :paramtype compute_target_type: str or ~flow.models.ComputeTargetType
        :keyword intellectual_property_publisher:
        :paramtype intellectual_property_publisher: str
        """
        super(ComputeIdentityDto, self).__init__(**kwargs)
        self.compute_name = kwargs.get('compute_name', None)
        self.compute_target_type = kwargs.get('compute_target_type', None)
        self.intellectual_property_publisher = kwargs.get('intellectual_property_publisher', None)


# Autorest-generated msrest model describing a compute target's surface attributes.
class ComputeInfo(msrest.serialization.Model):
    """ComputeInfo.

    :ivar name:
    :vartype name: str
    :ivar compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT", "AKSENDPOINT",
     "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN".
    :vartype compute_type: str or ~flow.models.ComputeEnvironmentType
    :ivar is_ssl_enabled:
    :vartype is_ssl_enabled: bool
    :ivar is_gpu_type:
    :vartype is_gpu_type: bool
    :ivar cluster_purpose:
    :vartype cluster_purpose: str
    :ivar public_ip_address:
    :vartype public_ip_address: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'is_ssl_enabled': {'key': 'isSslEnabled', 'type': 'bool'},
        'is_gpu_type': {'key': 'isGpuType', 'type': 'bool'},
        'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'},
        'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT",
         "AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE",
         "UNKNOWN".
        :paramtype compute_type: str or ~flow.models.ComputeEnvironmentType
        :keyword is_ssl_enabled:
        :paramtype is_ssl_enabled: bool
        :keyword is_gpu_type:
        :paramtype is_gpu_type: bool
        :keyword cluster_purpose:
        :paramtype cluster_purpose: str
        :keyword public_ip_address:
        :paramtype public_ip_address: str
        """
        super(ComputeInfo, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.compute_type = kwargs.get('compute_type', None)
        self.is_ssl_enabled = kwargs.get('is_ssl_enabled', None)
        self.is_gpu_type = kwargs.get('is_gpu_type', None)
        self.cluster_purpose = kwargs.get('cluster_purpose', None)
        self.public_ip_address = kwargs.get('public_ip_address', None)


# Autorest-generated msrest model; 'compute_type' is the only required field.
class ComputeProperties(msrest.serialization.Model):
    """ComputeProperties.

    All required parameters must be populated in order to send to Azure.

    :ivar created_on:
    :vartype created_on: ~datetime.datetime
    :ivar modified_on:
    :vartype modified_on: ~datetime.datetime
    :ivar disable_local_auth:
    :vartype disable_local_auth: bool
    :ivar description:
    :vartype description: str
    :ivar resource_id:
    :vartype resource_id: str
    :ivar compute_type: Required.
    :vartype compute_type: str
    :ivar compute_location:
    :vartype compute_location: str
    :ivar provisioning_state: Possible values include: "Unknown", "Updating", "Creating",
     "Deleting", "Accepted", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or ~flow.models.ProvisioningState
    :ivar provisioning_errors:
    :vartype provisioning_errors: list[~flow.models.ODataErrorResponse]
    :ivar provisioning_warnings: This is a dictionary.
    :vartype provisioning_warnings: dict[str, str]
    :ivar is_attached_compute:
    :vartype is_attached_compute: bool
    :ivar properties: Any object.
    :vartype properties: any
    :ivar status:
    :vartype status: ~flow.models.ComputeStatus
    :ivar warnings:
    :vartype warnings: list[~flow.models.ComputeWarning]
    """

    # compute_type is mandatory and must be a non-empty string.
    _validation = {
        'compute_type': {'required': True, 'min_length': 1},
    }

    _attribute_map = {
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
        'description': {'key': 'description', 'type': 'str'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_location': {'key': 'computeLocation', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ODataErrorResponse]'},
        'provisioning_warnings': {'key': 'provisioningWarnings', 'type': '{str}'},
        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': 'object'},
        'status': {'key': 'status', 'type': 'ComputeStatus'},
        'warnings': {'key': 'warnings', 'type': '[ComputeWarning]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword created_on:
        :paramtype created_on: ~datetime.datetime
        :keyword modified_on:
        :paramtype modified_on: ~datetime.datetime
        :keyword disable_local_auth:
        :paramtype disable_local_auth: bool
        :keyword description:
        :paramtype description: str
        :keyword resource_id:
        :paramtype resource_id: str
        :keyword compute_type: Required.
        :paramtype compute_type: str
        :keyword compute_location:
        :paramtype compute_location: str
        :keyword provisioning_state: Possible values include: "Unknown", "Updating", "Creating",
         "Deleting", "Accepted", "Succeeded", "Failed", "Canceled".
        :paramtype provisioning_state: str or ~flow.models.ProvisioningState
        :keyword provisioning_errors:
        :paramtype provisioning_errors: list[~flow.models.ODataErrorResponse]
        :keyword provisioning_warnings: This is a dictionary.
        :paramtype provisioning_warnings: dict[str, str]
        :keyword is_attached_compute:
        :paramtype is_attached_compute: bool
        :keyword properties: Any object.
        :paramtype properties: any
        :keyword status:
        :paramtype status: ~flow.models.ComputeStatus
        :keyword warnings:
        :paramtype warnings: list[~flow.models.ComputeWarning]
        """
        super(ComputeProperties, self).__init__(**kwargs)
        self.created_on = kwargs.get('created_on', None)
        self.modified_on = kwargs.get('modified_on', None)
        self.disable_local_auth = kwargs.get('disable_local_auth', None)
        self.description = kwargs.get('description', None)
        self.resource_id = kwargs.get('resource_id', None)
        # Required field: direct indexing deliberately raises KeyError when omitted.
        self.compute_type = kwargs['compute_type']
        self.compute_location = kwargs.get('compute_location', None)
        self.provisioning_state = kwargs.get('provisioning_state', None)
        self.provisioning_errors = kwargs.get('provisioning_errors', None)
        self.provisioning_warnings = kwargs.get('provisioning_warnings', None)
        self.is_attached_compute = kwargs.get('is_attached_compute', None)
        self.properties = kwargs.get('properties', None)
        self.status = kwargs.get('status', None)
        self.warnings = kwargs.get('warnings', None)


class ComputeRequest(msrest.serialization.Model):
    """ComputeRequest.

    :ivar node_count:
    :vartype node_count: int
    :ivar gpu_count:
    :vartype gpu_count: int
    """

    _attribute_map = {
        'node_count': {'key': 'nodeCount', 'type': 'int'},
        'gpu_count': {'key': 'gpuCount', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword node_count:
        :paramtype node_count: int
        :keyword gpu_count:
        :paramtype gpu_count: int
        """
        super(ComputeRequest, self).__init__(**kwargs)
        self.node_count = kwargs.get('node_count', None)
        self.gpu_count = kwargs.get('gpu_count', None)


class ComputeRPUserAssignedIdentity(msrest.serialization.Model):
    """ComputeRPUserAssignedIdentity.
    :ivar principal_id:
    :vartype principal_id: str
    :ivar tenant_id:
    :vartype tenant_id: str
    :ivar client_id:
    :vartype client_id: str
    :ivar client_secret_url:
    :vartype client_secret_url: str
    :ivar resource_id:
    :vartype resource_id: str
    """

    # Wire-format map: attribute name -> (JSON key, msrest serializer type).
    _attribute_map = {
        'principal_id': {'key': 'principalId', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'client_id': {'key': 'clientId', 'type': 'str'},
        'client_secret_url': {'key': 'clientSecretUrl', 'type': 'str'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword principal_id:
        :paramtype principal_id: str
        :keyword tenant_id:
        :paramtype tenant_id: str
        :keyword client_id:
        :paramtype client_id: str
        :keyword client_secret_url:
        :paramtype client_secret_url: str
        :keyword resource_id:
        :paramtype resource_id: str
        """
        super(ComputeRPUserAssignedIdentity, self).__init__(**kwargs)
        self.principal_id = kwargs.get('principal_id', None)
        self.tenant_id = kwargs.get('tenant_id', None)
        self.client_id = kwargs.get('client_id', None)
        self.client_secret_url = kwargs.get('client_secret_url', None)
        self.resource_id = kwargs.get('resource_id', None)


class ComputeSetting(msrest.serialization.Model):
    """ComputeSetting.

    :ivar name:
    :vartype name: str
    :ivar compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker",
     "Databricks", "Aisc".
    :vartype compute_type: str or ~flow.models.ComputeType
    :ivar batch_ai_compute_info:
    :vartype batch_ai_compute_info: ~flow.models.BatchAiComputeInfo
    :ivar remote_docker_compute_info:
    :vartype remote_docker_compute_info: ~flow.models.RemoteDockerComputeInfo
    :ivar hdi_cluster_compute_info:
    :vartype hdi_cluster_compute_info: ~flow.models.HdiClusterComputeInfo
    :ivar mlc_compute_info:
    :vartype mlc_compute_info: ~flow.models.MlcComputeInfo
    :ivar databricks_compute_info:
    :vartype databricks_compute_info: ~flow.models.DatabricksComputeInfo
    """

    # One nested info model per supported compute type; only the matching one is populated.
    # NOTE(review): that population convention is presumed from the shape — confirm with the service contract.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'batch_ai_compute_info': {'key': 'batchAiComputeInfo', 'type': 'BatchAiComputeInfo'},
        'remote_docker_compute_info': {'key': 'remoteDockerComputeInfo', 'type': 'RemoteDockerComputeInfo'},
        'hdi_cluster_compute_info': {'key': 'hdiClusterComputeInfo', 'type': 'HdiClusterComputeInfo'},
        'mlc_compute_info': {'key': 'mlcComputeInfo', 'type': 'MlcComputeInfo'},
        'databricks_compute_info': {'key': 'databricksComputeInfo', 'type': 'DatabricksComputeInfo'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster",
         "RemoteDocker", "Databricks", "Aisc".
        :paramtype compute_type: str or ~flow.models.ComputeType
        :keyword batch_ai_compute_info:
        :paramtype batch_ai_compute_info: ~flow.models.BatchAiComputeInfo
        :keyword remote_docker_compute_info:
        :paramtype remote_docker_compute_info: ~flow.models.RemoteDockerComputeInfo
        :keyword hdi_cluster_compute_info:
        :paramtype hdi_cluster_compute_info: ~flow.models.HdiClusterComputeInfo
        :keyword mlc_compute_info:
        :paramtype mlc_compute_info: ~flow.models.MlcComputeInfo
        :keyword databricks_compute_info:
        :paramtype databricks_compute_info: ~flow.models.DatabricksComputeInfo
        """
        super(ComputeSetting, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.compute_type = kwargs.get('compute_type', None)
        self.batch_ai_compute_info = kwargs.get('batch_ai_compute_info', None)
        self.remote_docker_compute_info = kwargs.get('remote_docker_compute_info', None)
        self.hdi_cluster_compute_info = kwargs.get('hdi_cluster_compute_info', None)
        self.mlc_compute_info = kwargs.get('mlc_compute_info', None)
        self.databricks_compute_info = kwargs.get('databricks_compute_info', None)


class ComputeStatus(msrest.serialization.Model):
    """ComputeStatus.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar is_status_available:
    :vartype is_status_available: bool
    :ivar detailed_status: Anything.
    :vartype detailed_status: any
    :ivar error: Represents OData v4 error object.
    :vartype error: ~flow.models.ODataError
    """

    # is_status_available is server-populated (readonly): not accepted in __init__.
    _validation = {
        'is_status_available': {'readonly': True},
    }

    _attribute_map = {
        'is_status_available': {'key': 'isStatusAvailable', 'type': 'bool'},
        'detailed_status': {'key': 'detailedStatus', 'type': 'object'},
        'error': {'key': 'error', 'type': 'ODataError'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword detailed_status: Anything.
        :paramtype detailed_status: any
        :keyword error: Represents OData v4 error object.
        :paramtype error: ~flow.models.ODataError
        """
        super(ComputeStatus, self).__init__(**kwargs)
        # Read-only field: starts as None and is only filled in by deserialization.
        self.is_status_available = None
        self.detailed_status = kwargs.get('detailed_status', None)
        self.error = kwargs.get('error', None)


class ComputeStatusDetail(msrest.serialization.Model):
    """ComputeStatusDetail.

    :ivar provisioning_state:
    :vartype provisioning_state: str
    :ivar provisioning_error_message:
    :vartype provisioning_error_message: str
    """

    _attribute_map = {
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'provisioning_error_message': {'key': 'provisioningErrorMessage', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword provisioning_state:
        :paramtype provisioning_state: str
        :keyword provisioning_error_message:
        :paramtype provisioning_error_message: str
        """
        super(ComputeStatusDetail, self).__init__(**kwargs)
        self.provisioning_state = kwargs.get('provisioning_state', None)
        self.provisioning_error_message = kwargs.get('provisioning_error_message', None)


class ComputeWarning(msrest.serialization.Model):
    """ComputeWarning.

    :ivar title:
    :vartype title: str
    :ivar message:
    :vartype message: str
    :ivar code:
    :vartype code: str
    :ivar severity: Possible values include: "Critical", "Error", "Warning", "Info".
    :vartype severity: str or ~flow.models.SeverityLevel
    """

    _attribute_map = {
        'title': {'key': 'title', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'code': {'key': 'code', 'type': 'str'},
        'severity': {'key': 'severity', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword title:
        :paramtype title: str
        :keyword message:
        :paramtype message: str
        :keyword code:
        :paramtype code: str
        :keyword severity: Possible values include: "Critical", "Error", "Warning", "Info".
        :paramtype severity: str or ~flow.models.SeverityLevel
        """
        super(ComputeWarning, self).__init__(**kwargs)
        self.title = kwargs.get('title', None)
        self.message = kwargs.get('message', None)
        self.code = kwargs.get('code', None)
        self.severity = kwargs.get('severity', None)


# Autorest-generated msrest model describing one configurable field of a connection type.
class ConnectionConfigSpec(msrest.serialization.Model):
    """ConnectionConfigSpec.

    :ivar name:
    :vartype name: str
    :ivar display_name:
    :vartype display_name: str
    :ivar config_value_type: Possible values include: "String", "Secret".
    :vartype config_value_type: str or ~flow.models.ConfigValueType
    :ivar description:
    :vartype description: str
    :ivar default_value:
    :vartype default_value: str
    :ivar enum_values:
    :vartype enum_values: list[str]
    :ivar is_optional:
    :vartype is_optional: bool
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'config_value_type': {'key': 'configValueType', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'default_value': {'key': 'defaultValue', 'type': 'str'},
        'enum_values': {'key': 'enumValues', 'type': '[str]'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword config_value_type: Possible values include: "String", "Secret".
        :paramtype config_value_type: str or ~flow.models.ConfigValueType
        :keyword description:
        :paramtype description: str
        :keyword default_value:
        :paramtype default_value: str
        :keyword enum_values:
        :paramtype enum_values: list[str]
        :keyword is_optional:
        :paramtype is_optional: bool
        """
        super(ConnectionConfigSpec, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.display_name = kwargs.get('display_name', None)
        self.config_value_type = kwargs.get('config_value_type', None)
        self.description = kwargs.get('description', None)
        self.default_value = kwargs.get('default_value', None)
        self.enum_values = kwargs.get('enum_values', None)
        self.is_optional = kwargs.get('is_optional', None)


# Autorest-generated msrest model for a workspace connection as returned by the service.
class ConnectionDto(msrest.serialization.Model):
    """ConnectionDto.

    :ivar connection_name:
    :vartype connection_name: str
    :ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
     "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
     "Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
    :vartype connection_type: str or ~flow.models.ConnectionType
    :ivar configs: This is a dictionary.
    :vartype configs: dict[str, str]
    :ivar custom_configs: This is a dictionary.
    :vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
    :ivar expiry_time:
    :vartype expiry_time: ~datetime.datetime
    :ivar owner:
    :vartype owner: ~flow.models.SchemaContractsCreatedBy
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Maps each attribute to its JSON key and msrest type string ('iso-8601' = datetime wire format).
    _attribute_map = {
        'connection_name': {'key': 'connectionName', 'type': 'str'},
        'connection_type': {'key': 'connectionType', 'type': 'str'},
        'configs': {'key': 'configs', 'type': '{str}'},
        'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
        'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
        'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword connection_name:
        :paramtype connection_name: str
        :keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
         "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
         "Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
        :paramtype connection_type: str or ~flow.models.ConnectionType
        :keyword configs: This is a dictionary.
        :paramtype configs: dict[str, str]
        :keyword custom_configs: This is a dictionary.
        :paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
        :keyword expiry_time:
        :paramtype expiry_time: ~datetime.datetime
        :keyword owner:
        :paramtype owner: ~flow.models.SchemaContractsCreatedBy
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(ConnectionDto, self).__init__(**kwargs)
        # Every field is an optional kwarg; unset fields default to None.
        self.connection_name = kwargs.get('connection_name', None)
        self.connection_type = kwargs.get('connection_type', None)
        self.configs = kwargs.get('configs', None)
        self.custom_configs = kwargs.get('custom_configs', None)
        self.expiry_time = kwargs.get('expiry_time', None)
        self.owner = kwargs.get('owner', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)


# Autogenerated msrest model (see file header); superset of ConnectionDto with id/scope/secret fields.
class ConnectionEntity(msrest.serialization.Model):
    """ConnectionEntity.

    :ivar connection_id:
    :vartype connection_id: str
    :ivar connection_name:
    :vartype connection_name: str
    :ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
     "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
     "Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
    :vartype connection_type: str or ~flow.models.ConnectionType
    :ivar connection_scope: Possible values include: "User", "WorkspaceShared".
    :vartype connection_scope: str or ~flow.models.ConnectionScope
    :ivar configs: This is a dictionary.
    :vartype configs: dict[str, str]
    :ivar custom_configs: This is a dictionary.
    :vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
    :ivar expiry_time:
    :vartype expiry_time: ~datetime.datetime
    :ivar secret_name:
    :vartype secret_name: str
    :ivar owner:
    :vartype owner: ~flow.models.SchemaContractsCreatedBy
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Maps each attribute to its JSON key and msrest type string for (de)serialization.
    _attribute_map = {
        'connection_id': {'key': 'connectionId', 'type': 'str'},
        'connection_name': {'key': 'connectionName', 'type': 'str'},
        'connection_type': {'key': 'connectionType', 'type': 'str'},
        'connection_scope': {'key': 'connectionScope', 'type': 'str'},
        'configs': {'key': 'configs', 'type': '{str}'},
        'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
        'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
        'secret_name': {'key': 'secretName', 'type': 'str'},
        'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword connection_id:
        :paramtype connection_id: str
        :keyword connection_name:
        :paramtype connection_name: str
        :keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
         "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
         "Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
        :paramtype connection_type: str or ~flow.models.ConnectionType
        :keyword connection_scope: Possible values include: "User", "WorkspaceShared".
        :paramtype connection_scope: str or ~flow.models.ConnectionScope
        :keyword configs: This is a dictionary.
        :paramtype configs: dict[str, str]
        :keyword custom_configs: This is a dictionary.
        :paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
        :keyword expiry_time:
        :paramtype expiry_time: ~datetime.datetime
        :keyword secret_name:
        :paramtype secret_name: str
        :keyword owner:
        :paramtype owner: ~flow.models.SchemaContractsCreatedBy
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(ConnectionEntity, self).__init__(**kwargs)
        # Every field is an optional kwarg; unset fields default to None.
        self.connection_id = kwargs.get('connection_id', None)
        self.connection_name = kwargs.get('connection_name', None)
        self.connection_type = kwargs.get('connection_type', None)
        self.connection_scope = kwargs.get('connection_scope', None)
        self.configs = kwargs.get('configs', None)
        self.custom_configs = kwargs.get('custom_configs', None)
        self.expiry_time = kwargs.get('expiry_time', None)
        self.secret_name = kwargs.get('secret_name', None)
        self.owner = kwargs.get('owner', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)


# Autogenerated msrest model (see file header); regenerate via AutoRest rather than hand-editing.
class ConnectionOverrideSetting(msrest.serialization.Model):
    """ConnectionOverrideSetting.

    :ivar connection_source_type: Possible values include: "Node", "NodeInput".
    :vartype connection_source_type: str or ~flow.models.ConnectionSourceType
    :ivar node_name:
    :vartype node_name: str
    :ivar node_input_name:
    :vartype node_input_name: str
    :ivar node_deployment_name_input:
    :vartype node_deployment_name_input: str
    :ivar node_model_input:
    :vartype node_model_input: str
    :ivar connection_name:
    :vartype connection_name: str
    :ivar deployment_name:
    :vartype deployment_name: str
    :ivar model:
    :vartype model: str
    :ivar connection_types:
    :vartype connection_types: list[str or ~flow.models.ConnectionType]
    :ivar capabilities:
    :vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
    :ivar model_enum:
    :vartype model_enum: list[str]
    """

    # Maps each attribute to its JSON key and msrest type string; enum lists serialize as '[str]'.
    _attribute_map = {
        'connection_source_type': {'key': 'connectionSourceType', 'type': 'str'},
        'node_name': {'key': 'nodeName', 'type': 'str'},
        'node_input_name': {'key': 'nodeInputName', 'type': 'str'},
        'node_deployment_name_input': {'key': 'nodeDeploymentNameInput', 'type': 'str'},
        'node_model_input': {'key': 'nodeModelInput', 'type': 'str'},
        'connection_name': {'key': 'connectionName', 'type': 'str'},
        'deployment_name': {'key': 'deploymentName', 'type': 'str'},
        'model': {'key': 'model', 'type': 'str'},
        'connection_types': {'key': 'connectionTypes', 'type': '[str]'},
        'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
        'model_enum': {'key': 'modelEnum', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword connection_source_type: Possible values include: "Node", "NodeInput".
        :paramtype connection_source_type: str or ~flow.models.ConnectionSourceType
        :keyword node_name:
        :paramtype node_name: str
        :keyword node_input_name:
        :paramtype node_input_name: str
        :keyword node_deployment_name_input:
        :paramtype node_deployment_name_input: str
        :keyword node_model_input:
        :paramtype node_model_input: str
        :keyword connection_name:
        :paramtype connection_name: str
        :keyword deployment_name:
        :paramtype deployment_name: str
        :keyword model:
        :paramtype model: str
        :keyword connection_types:
        :paramtype connection_types: list[str or ~flow.models.ConnectionType]
        :keyword capabilities:
        :paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
        :keyword model_enum:
        :paramtype model_enum: list[str]
        """
        super(ConnectionOverrideSetting, self).__init__(**kwargs)
        # Every field is an optional kwarg; unset fields default to None.
        self.connection_source_type = kwargs.get('connection_source_type', None)
        self.node_name = kwargs.get('node_name', None)
        self.node_input_name = kwargs.get('node_input_name', None)
        self.node_deployment_name_input = kwargs.get('node_deployment_name_input', None)
        self.node_model_input = kwargs.get('node_model_input', None)
        self.connection_name = kwargs.get('connection_name', None)
        self.deployment_name = kwargs.get('deployment_name', None)
        self.model = kwargs.get('model', None)
        self.connection_types = kwargs.get('connection_types', None)
        self.capabilities = kwargs.get('capabilities', None)
        self.model_enum = kwargs.get('model_enum', None)


# Autogenerated msrest model (see file header); regenerate via AutoRest rather than hand-editing.
class ConnectionSpec(msrest.serialization.Model):
    """ConnectionSpec.

    :ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
     "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
     "Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
    :vartype connection_type: str or ~flow.models.ConnectionType
    :ivar config_specs:
    :vartype config_specs: list[~flow.models.ConnectionConfigSpec]
    """

    _attribute_map = {
        'connection_type': {'key': 'connectionType', 'type': 'str'},
        'config_specs': {'key': 'configSpecs', 'type': '[ConnectionConfigSpec]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
         "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
         "Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
        :paramtype connection_type: str or ~flow.models.ConnectionType
        :keyword config_specs:
        :paramtype config_specs: list[~flow.models.ConnectionConfigSpec]
        """
        super(ConnectionSpec, self).__init__(**kwargs)
        self.connection_type = kwargs.get('connection_type', None)
        self.config_specs = kwargs.get('config_specs', None)


# Autogenerated msrest model (see file header); all fields are optional kwargs defaulting to None.
class ContainerInstanceConfiguration(msrest.serialization.Model):
    """ContainerInstanceConfiguration.

    :ivar region:
    :vartype region: str
    :ivar cpu_cores:
    :vartype cpu_cores: float
    :ivar memory_gb:
    :vartype memory_gb: float
    """

    _attribute_map = {
        'region': {'key': 'region', 'type': 'str'},
        'cpu_cores': {'key': 'cpuCores', 'type': 'float'},
        'memory_gb': {'key': 'memoryGb', 'type': 'float'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword region:
        :paramtype region: str
        :keyword cpu_cores:
        :paramtype cpu_cores: float
        :keyword memory_gb:
        :paramtype memory_gb: float
        """
        super(ContainerInstanceConfiguration, self).__init__(**kwargs)
        self.region = kwargs.get('region', None)
        self.cpu_cores = kwargs.get('cpu_cores', None)
        self.memory_gb = kwargs.get('memory_gb', None)


# Autogenerated msrest model (see file header). NOTE(review): 'password' is carried as a plain
# str field here — handle instances of this model as secret-bearing data.
class ContainerRegistry(msrest.serialization.Model):
    """ContainerRegistry.

    :ivar address:
    :vartype address: str
    :ivar username:
    :vartype username: str
    :ivar password:
    :vartype password: str
    :ivar credential_type:
    :vartype credential_type: str
    :ivar registry_identity:
    :vartype registry_identity: ~flow.models.RegistryIdentity
    """

    _attribute_map = {
        'address': {'key': 'address', 'type': 'str'},
        'username': {'key': 'username', 'type': 'str'},
        'password': {'key': 'password', 'type': 'str'},
        'credential_type': {'key': 'credentialType', 'type': 'str'},
        'registry_identity': {'key': 'registryIdentity', 'type': 'RegistryIdentity'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword address:
        :paramtype address: str
        :keyword username:
        :paramtype username: str
        :keyword password:
        :paramtype password: str
        :keyword credential_type:
        :paramtype credential_type: str
        :keyword registry_identity:
        :paramtype registry_identity: ~flow.models.RegistryIdentity
        """
        super(ContainerRegistry, self).__init__(**kwargs)
        self.address = kwargs.get('address', None)
        self.username = kwargs.get('username', None)
        self.password = kwargs.get('password', None)
        self.credential_type = kwargs.get('credential_type', None)
        self.registry_identity = kwargs.get('registry_identity', None)


# Autogenerated msrest model (see file header); all fields are optional kwargs defaulting to None.
class ContainerResourceRequirements(msrest.serialization.Model):
    """ContainerResourceRequirements.

    :ivar cpu:
    :vartype cpu: float
    :ivar cpu_limit:
    :vartype cpu_limit: float
    :ivar memory_in_gb:
    :vartype memory_in_gb: float
    :ivar memory_in_gb_limit:
    :vartype memory_in_gb_limit: float
    :ivar gpu_enabled:
    :vartype gpu_enabled: bool
    :ivar gpu:
    :vartype gpu: int
    :ivar fpga:
    :vartype fpga: int
    """

    _attribute_map = {
        'cpu': {'key': 'cpu', 'type': 'float'},
        'cpu_limit': {'key': 'cpuLimit', 'type': 'float'},
        'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
        'memory_in_gb_limit': {'key': 'memoryInGBLimit', 'type': 'float'},
        'gpu_enabled': {'key': 'gpuEnabled', 'type': 'bool'},
        'gpu': {'key': 'gpu', 'type': 'int'},
        'fpga': {'key': 'fpga', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword cpu:
        :paramtype cpu: float
        :keyword cpu_limit:
        :paramtype cpu_limit: float
        :keyword memory_in_gb:
        :paramtype memory_in_gb: float
        :keyword memory_in_gb_limit:
        :paramtype memory_in_gb_limit: float
        :keyword gpu_enabled:
        :paramtype gpu_enabled: bool
        :keyword gpu:
        :paramtype gpu: int
        :keyword fpga:
        :paramtype fpga: int
        """
        super(ContainerResourceRequirements, self).__init__(**kwargs)
        self.cpu = kwargs.get('cpu', None)
        self.cpu_limit = kwargs.get('cpu_limit', None)
        self.memory_in_gb = kwargs.get('memory_in_gb', None)
        self.memory_in_gb_limit = kwargs.get('memory_in_gb_limit', None)
        self.gpu_enabled = kwargs.get('gpu_enabled', None)
        self.gpu = kwargs.get('gpu', None)
        self.fpga = kwargs.get('fpga', None)


# Autogenerated msrest model (see file header); regenerate via AutoRest rather than hand-editing.
class ControlInput(msrest.serialization.Model):
    """ControlInput.

    :ivar name:
    :vartype name: str
    :ivar default_value: Possible values include: "None", "False", "True", "Skipped".
    :vartype default_value: str or ~flow.models.ControlInputValue
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'default_value': {'key': 'defaultValue', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword default_value: Possible values include: "None", "False", "True", "Skipped".
        :paramtype default_value: str or ~flow.models.ControlInputValue
        """
        super(ControlInput, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.default_value = kwargs.get('default_value', None)


# Autogenerated msrest model (see file header); all fields are optional kwargs defaulting to None.
class ControlOutput(msrest.serialization.Model):
    """ControlOutput.

    :ivar name:
    :vartype name: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        """
        super(ControlOutput, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)


# Autogenerated msrest model (see file header). Note the JSON key is Pascal-case 'DataCopyMode',
# unlike the camelCase keys used elsewhere in this file.
class CopyDataTask(msrest.serialization.Model):
    """CopyDataTask.

    :ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
    :vartype data_copy_mode: str or ~flow.models.DataCopyMode
    """

    _attribute_map = {
        'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
        :paramtype data_copy_mode: str or ~flow.models.DataCopyMode
        """
        super(CopyDataTask, self).__init__(**kwargs)
        self.data_copy_mode = kwargs.get('data_copy_mode', None)


# Autogenerated msrest model (see file header); all fields are optional kwargs defaulting to None.
class CreatedBy(msrest.serialization.Model):
    """CreatedBy.

    :ivar user_object_id:
    :vartype user_object_id: str
    :ivar user_tenant_id:
    :vartype user_tenant_id: str
    :ivar user_name:
    :vartype user_name: str
    """

    _attribute_map = {
        'user_object_id': {'key': 'userObjectId', 'type': 'str'},
        'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
        'user_name': {'key': 'userName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword user_object_id:
        :paramtype user_object_id: str
        :keyword user_tenant_id:
        :paramtype user_tenant_id: str
        :keyword user_name:
        :paramtype user_name: str
        """
        super(CreatedBy, self).__init__(**kwargs)
        self.user_object_id = kwargs.get('user_object_id', None)
        self.user_tenant_id = kwargs.get('user_tenant_id', None)
        self.user_name = kwargs.get('user_name', None)


# Autogenerated msrest model (see file header); regenerate via AutoRest rather than hand-editing.
class CreatedFromDto(msrest.serialization.Model):
    """CreatedFromDto.

    :ivar type: The only acceptable values to pass in are None and "Notebook". The default value
     is None.
    :vartype type: str
    :ivar location_type: The only acceptable values to pass in are None and "ArtifactId". The
     default value is None.
    :vartype location_type: str
    :ivar location:
    :vartype location: str
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'location_type': {'key': 'locationType', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword type: The only acceptable values to pass in are None and "Notebook". The default
         value is None.
        :paramtype type: str
        :keyword location_type: The only acceptable values to pass in are None and "ArtifactId".
         The default value is None.
        :paramtype location_type: str
        :keyword location:
        :paramtype location: str
        """
        super(CreatedFromDto, self).__init__(**kwargs)
        self.type = kwargs.get('type', None)
        self.location_type = kwargs.get('location_type', None)
        self.location = kwargs.get('location', None)


# Autogenerated msrest model (see file header); all fields are optional kwargs defaulting to None.
class CreateFlowFromSampleRequest(msrest.serialization.Model):
    """CreateFlowFromSampleRequest.

    :ivar flow_name:
    :vartype flow_name: str
    :ivar sample_resource_id:
    :vartype sample_resource_id: str
    :ivar flow_definition_file_path:
    :vartype flow_definition_file_path: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar is_archived:
    :vartype is_archived: bool
    """

    _attribute_map = {
        'flow_name': {'key': 'flowName', 'type': 'str'},
        'sample_resource_id': {'key': 'sampleResourceId', 'type': 'str'},
        'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'is_archived': {'key': 'isArchived', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword flow_name:
        :paramtype flow_name: str
        :keyword sample_resource_id:
        :paramtype sample_resource_id: str
        :keyword flow_definition_file_path:
        :paramtype flow_definition_file_path: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword is_archived:
        :paramtype is_archived: bool
        """
        super(CreateFlowFromSampleRequest, self).__init__(**kwargs)
        self.flow_name = kwargs.get('flow_name', None)
        self.sample_resource_id = kwargs.get('sample_resource_id', None)
        self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
        self.tags = kwargs.get('tags', None)
        self.is_archived = kwargs.get('is_archived', None)


# Autogenerated msrest model (see file header); regenerate via AutoRest rather than hand-editing.
class CreateFlowRequest(msrest.serialization.Model):
    """CreateFlowRequest.

    :ivar flow_name:
    :vartype flow_name: str
    :ivar description:
    :vartype description: str
    :ivar details:
    :vartype details: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar flow:
    :vartype flow: ~flow.models.Flow
    :ivar flow_definition_file_path:
    :vartype flow_definition_file_path: str
    :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
    :vartype flow_type: str or ~flow.models.FlowType
    :ivar flow_run_settings:
    :vartype flow_run_settings: ~flow.models.FlowRunSettings
    :ivar is_archived:
    :vartype is_archived: bool
    :ivar vm_size:
    :vartype vm_size: str
    :ivar max_idle_time_seconds:
    :vartype max_idle_time_seconds: long
    :ivar identity:
    :vartype identity: str
    """

    _attribute_map = {
        'flow_name': {'key': 'flowName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'details': {'key': 'details', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'flow': {'key': 'flow', 'type': 'Flow'},
        'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
        'flow_type': {'key': 'flowType', 'type': 'str'},
        'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'},
        'is_archived': {'key': 'isArchived', 'type': 'bool'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
        'identity': {'key': 'identity', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword flow_name:
        :paramtype flow_name: str
        :keyword description:
        :paramtype description: str
        :keyword details:
        :paramtype details: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword flow:
        :paramtype flow: ~flow.models.Flow
        :keyword flow_definition_file_path:
        :paramtype flow_definition_file_path: str
        :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
        :paramtype flow_type: str or ~flow.models.FlowType
        :keyword flow_run_settings:
        :paramtype flow_run_settings: ~flow.models.FlowRunSettings
        :keyword is_archived:
        :paramtype is_archived: bool
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword max_idle_time_seconds:
        :paramtype max_idle_time_seconds: long
        :keyword identity:
        :paramtype identity: str
        """
        super(CreateFlowRequest, self).__init__(**kwargs)
        # Every field is an optional kwarg; unset fields default to None.
        self.flow_name = kwargs.get('flow_name', None)
        self.description = kwargs.get('description', None)
        self.details = kwargs.get('details', None)
        self.tags = kwargs.get('tags', None)
        self.flow = kwargs.get('flow', None)
        self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
        self.flow_type = kwargs.get('flow_type', None)
        self.flow_run_settings = kwargs.get('flow_run_settings', None)
        self.is_archived = kwargs.get('is_archived', None)
        self.vm_size = kwargs.get('vm_size', None)
        self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
        self.identity = kwargs.get('identity', None)


# Autogenerated msrest model (see file header); regenerate via AutoRest rather than hand-editing.
class CreateFlowRuntimeRequest(msrest.serialization.Model):
    """CreateFlowRuntimeRequest.

    :ivar runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
     "TrainingSession".
    :vartype runtime_type: str or ~flow.models.RuntimeType
    :ivar identity:
    :vartype identity: ~flow.models.ManagedServiceIdentity
    :ivar instance_type:
    :vartype instance_type: str
    :ivar from_existing_endpoint:
    :vartype from_existing_endpoint: bool
    :ivar from_existing_deployment:
    :vartype from_existing_deployment: bool
    :ivar endpoint_name:
    :vartype endpoint_name: str
    :ivar deployment_name:
    :vartype deployment_name: str
    :ivar compute_instance_name:
    :vartype compute_instance_name: str
    :ivar from_existing_custom_app:
    :vartype from_existing_custom_app: bool
    :ivar custom_app_name:
    :vartype custom_app_name: str
    :ivar runtime_description:
    :vartype runtime_description: str
    :ivar environment:
    :vartype environment: str
    :ivar instance_count:
    :vartype instance_count: int
    """

    # Maps each attribute to its JSON key and msrest type string for (de)serialization.
    _attribute_map = {
        'runtime_type': {'key': 'runtimeType', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'from_existing_endpoint': {'key': 'fromExistingEndpoint', 'type': 'bool'},
        'from_existing_deployment': {'key': 'fromExistingDeployment', 'type': 'bool'},
        'endpoint_name': {'key': 'endpointName', 'type': 'str'},
        'deployment_name': {'key': 'deploymentName', 'type': 'str'},
        'compute_instance_name': {'key': 'computeInstanceName', 'type': 'str'},
        'from_existing_custom_app': {'key': 'fromExistingCustomApp', 'type': 'bool'},
        'custom_app_name': {'key': 'customAppName', 'type': 'str'},
        'runtime_description': {'key': 'runtimeDescription', 'type': 'str'},
        'environment': {'key': 'environment', 'type': 'str'},
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
         "TrainingSession".
        :paramtype runtime_type: str or ~flow.models.RuntimeType
        :keyword identity:
        :paramtype identity: ~flow.models.ManagedServiceIdentity
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword from_existing_endpoint:
        :paramtype from_existing_endpoint: bool
        :keyword from_existing_deployment:
        :paramtype from_existing_deployment: bool
        :keyword endpoint_name:
        :paramtype endpoint_name: str
        :keyword deployment_name:
        :paramtype deployment_name: str
        :keyword compute_instance_name:
        :paramtype compute_instance_name: str
        :keyword from_existing_custom_app:
        :paramtype from_existing_custom_app: bool
        :keyword custom_app_name:
        :paramtype custom_app_name: str
        :keyword runtime_description:
        :paramtype runtime_description: str
        :keyword environment:
        :paramtype environment: str
        :keyword instance_count:
        :paramtype instance_count: int
        """
        super(CreateFlowRuntimeRequest, self).__init__(**kwargs)
        # Every field is an optional kwarg; unset fields default to None.
        self.runtime_type = kwargs.get('runtime_type', None)
        self.identity = kwargs.get('identity', None)
        self.instance_type = kwargs.get('instance_type', None)
        self.from_existing_endpoint = kwargs.get('from_existing_endpoint', None)
        self.from_existing_deployment = kwargs.get('from_existing_deployment', None)
        self.endpoint_name = kwargs.get('endpoint_name', None)
        self.deployment_name = kwargs.get('deployment_name', None)
        self.compute_instance_name = kwargs.get('compute_instance_name', None)
        self.from_existing_custom_app = kwargs.get('from_existing_custom_app', None)
        self.custom_app_name = kwargs.get('custom_app_name', None)
        self.runtime_description = kwargs.get('runtime_description', None)
        self.environment = kwargs.get('environment', None)
        self.instance_count = kwargs.get('instance_count', None)


# Autogenerated msrest model (see file header); all fields are optional kwargs defaulting to None.
class CreateFlowSessionRequest(msrest.serialization.Model):
    """CreateFlowSessionRequest.

    :ivar python_pip_requirements:
    :vartype python_pip_requirements: list[str]
    :ivar base_image:
    :vartype base_image: str
    :ivar vm_size:
    :vartype vm_size: str
    :ivar max_idle_time_seconds:
    :vartype max_idle_time_seconds: long
    :ivar action: Possible values include: "Install", "Reset", "Update", "Delete".
    :vartype action: str or ~flow.models.SetupFlowSessionAction
    :ivar identity:
    :vartype identity: str
    """

    _attribute_map = {
        'python_pip_requirements': {'key': 'pythonPipRequirements', 'type': '[str]'},
        'base_image': {'key': 'baseImage', 'type': 'str'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
        'action': {'key': 'action', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword python_pip_requirements:
        :paramtype python_pip_requirements: list[str]
        :keyword base_image:
        :paramtype base_image: str
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword max_idle_time_seconds:
        :paramtype max_idle_time_seconds: long
        :keyword action: Possible values include: "Install", "Reset", "Update", "Delete".
        :paramtype action: str or ~flow.models.SetupFlowSessionAction
        :keyword identity:
        :paramtype identity: str
        """
        super(CreateFlowSessionRequest, self).__init__(**kwargs)
        self.python_pip_requirements = kwargs.get('python_pip_requirements', None)
        self.base_image = kwargs.get('base_image', None)
        self.vm_size = kwargs.get('vm_size', None)
        self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
        self.action = kwargs.get('action', None)
        self.identity = kwargs.get('identity', None)


# Autogenerated msrest model (see file header); regenerate via AutoRest rather than hand-editing.
class CreateInferencePipelineRequest(msrest.serialization.Model):
    """CreateInferencePipelineRequest.

    :ivar module_node_id:
    :vartype module_node_id: str
    :ivar port_name:
    :vartype port_name: str
    :ivar training_pipeline_draft_name:
    :vartype training_pipeline_draft_name: str
    :ivar training_pipeline_run_display_name:
    :vartype training_pipeline_run_display_name: str
    :ivar name:
    :vartype name: str
    :ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
     "BatchInferencePipeline", "Unknown".
    :vartype pipeline_type: str or ~flow.models.PipelineType
    :ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
    :vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
    :ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
     "ContainsDesignerBuildin".
    :vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
    :ivar sub_pipelines_info:
    :vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
    :ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
    :vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
    :ivar pipeline_parameters: This is a dictionary.
    :vartype pipeline_parameters: dict[str, str]
    :ivar data_path_assignments: This is a dictionary.
    :vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
    :ivar data_set_definition_value_assignments: This is a dictionary.
    :vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
    :ivar asset_output_settings_assignments: This is a dictionary.
    :vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
    :ivar graph:
    :vartype graph: ~flow.models.GraphDraftEntity
    :ivar pipeline_run_settings:
    :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
    :ivar module_node_run_settings:
    :vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
    :ivar module_node_ui_input_settings:
    :vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar continue_run_on_step_failure:
    :vartype continue_run_on_step_failure: bool
    :ivar description:
    :vartype description: str
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar enforce_rerun:
    :vartype enforce_rerun: bool
    :ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
     "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
     "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
     "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
    :vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
    """

    # Maps each attribute to its JSON key and msrest type string for (de)serialization.
    _attribute_map = {
        'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
        'port_name': {'key': 'portName', 'type': 'str'},
        'training_pipeline_draft_name': {'key': 'trainingPipelineDraftName', 'type': 'str'},
        'training_pipeline_run_display_name': {'key': 'trainingPipelineRunDisplayName', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
        'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
        'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
        'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
        'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
        'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
        'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
        'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
        'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
        'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
        'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
        'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
        'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
        'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword module_node_id:
        :paramtype module_node_id: str
        :keyword port_name:
        :paramtype port_name: str
        :keyword training_pipeline_draft_name:
        :paramtype training_pipeline_draft_name: str
        :keyword training_pipeline_run_display_name:
        :paramtype training_pipeline_run_display_name: str
        :keyword name:
        :paramtype name: str
        :keyword pipeline_type: Possible values include: "TrainingPipeline",
         "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
        :paramtype pipeline_type: str or ~flow.models.PipelineType
        :keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
        :paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
        :keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
         "ContainsDesignerBuildin".
        :paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
        :keyword sub_pipelines_info:
        :paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
        :keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
        :paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
        :keyword pipeline_parameters: This is a dictionary.
        :paramtype pipeline_parameters: dict[str, str]
        :keyword data_path_assignments: This is a dictionary.
        :paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
        :keyword data_set_definition_value_assignments: This is a dictionary.
        :paramtype data_set_definition_value_assignments: dict[str,
         ~flow.models.DataSetDefinitionValue]
        :keyword asset_output_settings_assignments: This is a dictionary.
        :paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
        :keyword graph:
        :paramtype graph: ~flow.models.GraphDraftEntity
        :keyword pipeline_run_settings:
        :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
        :keyword module_node_run_settings:
        :paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
        :keyword module_node_ui_input_settings:
        :paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword continue_run_on_step_failure:
        :paramtype continue_run_on_step_failure: bool
        :keyword description:
        :paramtype description: str
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword enforce_rerun:
        :paramtype enforce_rerun: bool
        :keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
         "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
         "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
         "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
        :paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
        """
        super(CreateInferencePipelineRequest, self).__init__(**kwargs)
        # Every field is an optional kwarg; unset fields default to None.
        self.module_node_id = kwargs.get('module_node_id', None)
        self.port_name = kwargs.get('port_name', None)
        self.training_pipeline_draft_name = kwargs.get('training_pipeline_draft_name', None)
        self.training_pipeline_run_display_name = kwargs.get('training_pipeline_run_display_name', None)
        self.name = kwargs.get('name', None)
        self.pipeline_type = kwargs.get('pipeline_type', None)
        self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
        self.graph_components_mode = kwargs.get('graph_components_mode', None)
        self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
        self.flattened_sub_graphs = kwargs.get('flattened_sub_graphs', None)
        self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
        self.data_path_assignments = kwargs.get('data_path_assignments', None)
        self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
        self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
        self.graph = kwargs.get('graph', None)
        self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
        self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
        self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
        self.tags = kwargs.get('tags', None)
        self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
        self.description = kwargs.get('description', None)
        self.properties = kwargs.get('properties', None)
        self.enforce_rerun = kwargs.get('enforce_rerun', None)
        self.dataset_access_modes = kwargs.get('dataset_access_modes', None)


class CreateOrUpdateConnectionRequest(msrest.serialization.Model):
    """CreateOrUpdateConnectionRequest.
    :ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
     "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
     "Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
    :vartype connection_type: str or ~flow.models.ConnectionType
    :ivar connection_scope: Possible values include: "User", "WorkspaceShared".
    :vartype connection_scope: str or ~flow.models.ConnectionScope
    :ivar configs: This is a dictionary.
    :vartype configs: dict[str, str]
    :ivar custom_configs: This is a dictionary.
    :vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
    :ivar expiry_time:
    :vartype expiry_time: ~datetime.datetime
    """

    # Serialization map: Python attribute -> REST JSON key and msrest type token.
    # 'iso-8601' makes msrest (de)serialize expiry_time as an ISO-8601 datetime string.
    _attribute_map = {
        'connection_type': {'key': 'connectionType', 'type': 'str'},
        'connection_scope': {'key': 'connectionScope', 'type': 'str'},
        'configs': {'key': 'configs', 'type': '{str}'},
        'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
        'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
         "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
         "Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
        :paramtype connection_type: str or ~flow.models.ConnectionType
        :keyword connection_scope: Possible values include: "User", "WorkspaceShared".
        :paramtype connection_scope: str or ~flow.models.ConnectionScope
        :keyword configs: This is a dictionary.
        :paramtype configs: dict[str, str]
        :keyword custom_configs: This is a dictionary.
        :paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
        :keyword expiry_time:
        :paramtype expiry_time: ~datetime.datetime
        """
        # All fields optional; unset kwargs default to None.
        super(CreateOrUpdateConnectionRequest, self).__init__(**kwargs)
        self.connection_type = kwargs.get('connection_type', None)
        self.connection_scope = kwargs.get('connection_scope', None)
        self.configs = kwargs.get('configs', None)
        self.custom_configs = kwargs.get('custom_configs', None)
        self.expiry_time = kwargs.get('expiry_time', None)


class CreateOrUpdateConnectionRequestDto(msrest.serialization.Model):
    """CreateOrUpdateConnectionRequestDto.

    :ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
     "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
     "Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
    :vartype connection_type: str or ~flow.models.ConnectionType
    :ivar configs: This is a dictionary.
    :vartype configs: dict[str, str]
    :ivar custom_configs: This is a dictionary.
    :vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
    :ivar expiry_time:
    :vartype expiry_time: ~datetime.datetime
    """

    # Same shape as CreateOrUpdateConnectionRequest but without connection_scope.
    _attribute_map = {
        'connection_type': {'key': 'connectionType', 'type': 'str'},
        'configs': {'key': 'configs', 'type': '{str}'},
        'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
        'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
         "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
         "Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
        :paramtype connection_type: str or ~flow.models.ConnectionType
        :keyword configs: This is a dictionary.
        :paramtype configs: dict[str, str]
        :keyword custom_configs: This is a dictionary.
        :paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
        :keyword expiry_time:
        :paramtype expiry_time: ~datetime.datetime
        """
        # All fields optional; unset kwargs default to None.
        super(CreateOrUpdateConnectionRequestDto, self).__init__(**kwargs)
        self.connection_type = kwargs.get('connection_type', None)
        self.configs = kwargs.get('configs', None)
        self.custom_configs = kwargs.get('custom_configs', None)
        self.expiry_time = kwargs.get('expiry_time', None)


class CreatePipelineDraftRequest(msrest.serialization.Model):
    """CreatePipelineDraftRequest.

    :ivar name:
    :vartype name: str
    :ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
     "BatchInferencePipeline", "Unknown".
    :vartype pipeline_type: str or ~flow.models.PipelineType
    :ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
    :vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
    :ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
     "ContainsDesignerBuildin".
    :vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
    :ivar sub_pipelines_info:
    :vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
    :ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
    :vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
    :ivar pipeline_parameters: This is a dictionary.
    :vartype pipeline_parameters: dict[str, str]
    :ivar data_path_assignments: This is a dictionary.
    :vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
    :ivar data_set_definition_value_assignments: This is a dictionary.
    :vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
    :ivar asset_output_settings_assignments: This is a dictionary.
    :vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
    :ivar graph:
    :vartype graph: ~flow.models.GraphDraftEntity
    :ivar pipeline_run_settings:
    :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
    :ivar module_node_run_settings:
    :vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
    :ivar module_node_ui_input_settings:
    :vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar continue_run_on_step_failure:
    :vartype continue_run_on_step_failure: bool
    :ivar description:
    :vartype description: str
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar enforce_rerun:
    :vartype enforce_rerun: bool
    :ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
     "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
     "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
     "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
    :vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
    """

    # Serialization map: Python attribute -> REST JSON key and msrest type token
    # ('{X}' = dict of X, '[X]' = list of X).
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
        'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
        'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
        'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
        'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
        'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
        'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
        'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
        'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
        'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
        'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
        'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
        'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
        'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword pipeline_type: Possible values include: "TrainingPipeline",
         "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
        :paramtype pipeline_type: str or ~flow.models.PipelineType
        :keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
        :paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
        :keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
         "ContainsDesignerBuildin".
        :paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
        :keyword sub_pipelines_info:
        :paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
        :keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
        :paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
        :keyword pipeline_parameters: This is a dictionary.
        :paramtype pipeline_parameters: dict[str, str]
        :keyword data_path_assignments: This is a dictionary.
        :paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
        :keyword data_set_definition_value_assignments: This is a dictionary.
        :paramtype data_set_definition_value_assignments: dict[str,
         ~flow.models.DataSetDefinitionValue]
        :keyword asset_output_settings_assignments: This is a dictionary.
        :paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
        :keyword graph:
        :paramtype graph: ~flow.models.GraphDraftEntity
        :keyword pipeline_run_settings:
        :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
        :keyword module_node_run_settings:
        :paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
        :keyword module_node_ui_input_settings:
        :paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword continue_run_on_step_failure:
        :paramtype continue_run_on_step_failure: bool
        :keyword description:
        :paramtype description: str
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword enforce_rerun:
        :paramtype enforce_rerun: bool
        :keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
         "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
         "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
         "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
        :paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
        """
        # Every field is optional; any kwarg not supplied defaults to None.
        super(CreatePipelineDraftRequest, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.pipeline_type = kwargs.get('pipeline_type', None)
        self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
        self.graph_components_mode = kwargs.get('graph_components_mode', None)
        self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
        self.flattened_sub_graphs = kwargs.get('flattened_sub_graphs', None)
        self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
        self.data_path_assignments = kwargs.get('data_path_assignments', None)
        self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
        self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
        self.graph = kwargs.get('graph', None)
        self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
        self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
        self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
        self.tags = kwargs.get('tags', None)
        self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
        self.description = kwargs.get('description', None)
        self.properties = kwargs.get('properties', None)
        self.enforce_rerun = kwargs.get('enforce_rerun', None)
        self.dataset_access_modes = kwargs.get('dataset_access_modes', None)


class CreatePipelineJobScheduleDto(msrest.serialization.Model):
    """CreatePipelineJobScheduleDto.
    :ivar name:
    :vartype name: str
    :ivar pipeline_job_name:
    :vartype pipeline_job_name: str
    :ivar pipeline_job_runtime_settings:
    :vartype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
    :ivar display_name:
    :vartype display_name: str
    :ivar trigger_type: Possible values include: "Recurrence", "Cron".
    :vartype trigger_type: str or ~flow.models.TriggerType
    :ivar recurrence:
    :vartype recurrence: ~flow.models.Recurrence
    :ivar cron:
    :vartype cron: ~flow.models.Cron
    :ivar status: Possible values include: "Enabled", "Disabled".
    :vartype status: str or ~flow.models.ScheduleStatus
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    """

    # Serialization map: Python attribute -> REST JSON key and msrest type token.
    # Only one of 'recurrence' / 'cron' is expected to be populated per trigger_type
    # -- presumably; TODO confirm against the service contract.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
        'pipeline_job_runtime_settings': {'key': 'pipelineJobRuntimeSettings', 'type': 'PipelineJobRuntimeBasicSettings'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'trigger_type': {'key': 'triggerType', 'type': 'str'},
        'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
        'cron': {'key': 'cron', 'type': 'Cron'},
        'status': {'key': 'status', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword pipeline_job_name:
        :paramtype pipeline_job_name: str
        :keyword pipeline_job_runtime_settings:
        :paramtype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
        :keyword display_name:
        :paramtype display_name: str
        :keyword trigger_type: Possible values include: "Recurrence", "Cron".
        :paramtype trigger_type: str or ~flow.models.TriggerType
        :keyword recurrence:
        :paramtype recurrence: ~flow.models.Recurrence
        :keyword cron:
        :paramtype cron: ~flow.models.Cron
        :keyword status: Possible values include: "Enabled", "Disabled".
        :paramtype status: str or ~flow.models.ScheduleStatus
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        """
        # All fields optional; unset kwargs default to None.
        super(CreatePipelineJobScheduleDto, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.pipeline_job_name = kwargs.get('pipeline_job_name', None)
        self.pipeline_job_runtime_settings = kwargs.get('pipeline_job_runtime_settings', None)
        self.display_name = kwargs.get('display_name', None)
        self.trigger_type = kwargs.get('trigger_type', None)
        self.recurrence = kwargs.get('recurrence', None)
        self.cron = kwargs.get('cron', None)
        self.status = kwargs.get('status', None)
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)


class CreatePublishedPipelineRequest(msrest.serialization.Model):
    """CreatePublishedPipelineRequest.

    :ivar use_pipeline_endpoint:
    :vartype use_pipeline_endpoint: bool
    :ivar pipeline_name:
    :vartype pipeline_name: str
    :ivar pipeline_description:
    :vartype pipeline_description: str
    :ivar use_existing_pipeline_endpoint:
    :vartype use_existing_pipeline_endpoint: bool
    :ivar pipeline_endpoint_name:
    :vartype pipeline_endpoint_name: str
    :ivar pipeline_endpoint_description:
    :vartype pipeline_endpoint_description: str
    :ivar set_as_default_pipeline_for_endpoint:
    :vartype set_as_default_pipeline_for_endpoint: bool
    :ivar step_tags: This is a dictionary.
    :vartype step_tags: dict[str, str]
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar pipeline_parameters: This is a dictionary.
    :vartype pipeline_parameters: dict[str, str]
    :ivar data_path_assignments: This is a dictionary.
    :vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
    :ivar data_set_definition_value_assignments: This is a dictionary.
    :vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
    :ivar asset_output_settings_assignments: This is a dictionary.
    :vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
    :ivar enable_notification:
    :vartype enable_notification: bool
    :ivar sub_pipelines_info:
    :vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
    :ivar display_name:
    :vartype display_name: str
    :ivar run_id:
    :vartype run_id: str
    :ivar parent_run_id:
    :vartype parent_run_id: str
    :ivar graph:
    :vartype graph: ~flow.models.GraphDraftEntity
    :ivar pipeline_run_settings:
    :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
    :ivar module_node_run_settings:
    :vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
    :ivar module_node_ui_input_settings:
    :vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar continue_run_on_step_failure:
    :vartype continue_run_on_step_failure: bool
    :ivar description:
    :vartype description: str
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar enforce_rerun:
    :vartype enforce_rerun: bool
    :ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
     "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
     "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
     "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
    :vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
    """

    # Serialization map: Python attribute -> REST JSON key and msrest type token
    # ('{X}' = dict of X, '[X]' = list of X).
    _attribute_map = {
        'use_pipeline_endpoint': {'key': 'usePipelineEndpoint', 'type': 'bool'},
        'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
        'pipeline_description': {'key': 'pipelineDescription', 'type': 'str'},
        'use_existing_pipeline_endpoint': {'key': 'useExistingPipelineEndpoint', 'type': 'bool'},
        'pipeline_endpoint_name': {'key': 'pipelineEndpointName', 'type': 'str'},
        'pipeline_endpoint_description': {'key': 'pipelineEndpointDescription', 'type': 'str'},
        'set_as_default_pipeline_for_endpoint': {'key': 'setAsDefaultPipelineForEndpoint', 'type': 'bool'},
        'step_tags': {'key': 'stepTags', 'type': '{str}'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
        'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
        'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
        'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
        'enable_notification': {'key': 'enableNotification', 'type': 'bool'},
        'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
        'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
        'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
        'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
        'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
        'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword use_pipeline_endpoint:
        :paramtype use_pipeline_endpoint: bool
        :keyword pipeline_name:
        :paramtype pipeline_name: str
        :keyword pipeline_description:
        :paramtype pipeline_description: str
        :keyword use_existing_pipeline_endpoint:
        :paramtype use_existing_pipeline_endpoint: bool
        :keyword pipeline_endpoint_name:
        :paramtype pipeline_endpoint_name: str
        :keyword pipeline_endpoint_description:
        :paramtype pipeline_endpoint_description: str
        :keyword set_as_default_pipeline_for_endpoint:
        :paramtype set_as_default_pipeline_for_endpoint: bool
        :keyword step_tags: This is a dictionary.
        :paramtype step_tags: dict[str, str]
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword pipeline_parameters: This is a dictionary.
        :paramtype pipeline_parameters: dict[str, str]
        :keyword data_path_assignments: This is a dictionary.
        :paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
        :keyword data_set_definition_value_assignments: This is a dictionary.
        :paramtype data_set_definition_value_assignments: dict[str,
         ~flow.models.DataSetDefinitionValue]
        :keyword asset_output_settings_assignments: This is a dictionary.
        :paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
        :keyword enable_notification:
        :paramtype enable_notification: bool
        :keyword sub_pipelines_info:
        :paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
        :keyword display_name:
        :paramtype display_name: str
        :keyword run_id:
        :paramtype run_id: str
        :keyword parent_run_id:
        :paramtype parent_run_id: str
        :keyword graph:
        :paramtype graph: ~flow.models.GraphDraftEntity
        :keyword pipeline_run_settings:
        :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
        :keyword module_node_run_settings:
        :paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
        :keyword module_node_ui_input_settings:
        :paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword continue_run_on_step_failure:
        :paramtype continue_run_on_step_failure: bool
        :keyword description:
        :paramtype description: str
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword enforce_rerun:
        :paramtype enforce_rerun: bool
        :keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
         "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
         "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
         "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
        :paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
        """
        # Every field is optional; any kwarg not supplied defaults to None.
        super(CreatePublishedPipelineRequest, self).__init__(**kwargs)
        self.use_pipeline_endpoint = kwargs.get('use_pipeline_endpoint', None)
        self.pipeline_name = kwargs.get('pipeline_name', None)
        self.pipeline_description = kwargs.get('pipeline_description', None)
        self.use_existing_pipeline_endpoint = kwargs.get('use_existing_pipeline_endpoint', None)
        self.pipeline_endpoint_name = kwargs.get('pipeline_endpoint_name', None)
        self.pipeline_endpoint_description = kwargs.get('pipeline_endpoint_description', None)
        self.set_as_default_pipeline_for_endpoint = kwargs.get('set_as_default_pipeline_for_endpoint', None)
        self.step_tags = kwargs.get('step_tags', None)
        self.experiment_name = kwargs.get('experiment_name', None)
        self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
        self.data_path_assignments = kwargs.get('data_path_assignments', None)
        self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
        self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
        self.enable_notification = kwargs.get('enable_notification', None)
        self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
        self.display_name = kwargs.get('display_name', None)
        self.run_id = kwargs.get('run_id', None)
        self.parent_run_id = kwargs.get('parent_run_id', None)
        self.graph = kwargs.get('graph', None)
        self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
        self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
        self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
        self.tags = kwargs.get('tags', None)
        self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
        self.description = kwargs.get('description', None)
        self.properties = kwargs.get('properties', None)
        self.enforce_rerun = kwargs.get('enforce_rerun', None)
        self.dataset_access_modes = kwargs.get('dataset_access_modes', None)


class CreateRealTimeEndpointRequest(msrest.serialization.Model):
    """CreateRealTimeEndpointRequest.

    :ivar name:
    :vartype name: str
    :ivar compute_info:
    :vartype compute_info: ~flow.models.ComputeInfo
    :ivar description:
    :vartype description: str
    :ivar linked_pipeline_draft_id:
    :vartype linked_pipeline_draft_id: str
    :ivar linked_pipeline_run_id:
    :vartype linked_pipeline_run_id: str
    :ivar aks_advance_settings:
    :vartype aks_advance_settings: ~flow.models.AKSAdvanceSettings
    :ivar aci_advance_settings:
    :vartype aci_advance_settings: ~flow.models.ACIAdvanceSettings
    :ivar linked_training_pipeline_run_id:
    :vartype linked_training_pipeline_run_id: str
    :ivar linked_experiment_name:
    :vartype linked_experiment_name: str
    :ivar graph_nodes_run_id_mapping: This is a dictionary.
    :vartype graph_nodes_run_id_mapping: dict[str, str]
    :ivar workflow:
    :vartype workflow: ~flow.models.PipelineGraph
    :ivar inputs:
    :vartype inputs: list[~flow.models.InputOutputPortMetadata]
    :ivar outputs:
    :vartype outputs: list[~flow.models.InputOutputPortMetadata]
    :ivar example_request:
    :vartype example_request: ~flow.models.ExampleRequest
    :ivar user_storage_connection_string:
    :vartype user_storage_connection_string: str
    :ivar user_storage_endpoint_uri:
    :vartype user_storage_endpoint_uri: str
    :ivar user_storage_workspace_sai_token:
    :vartype user_storage_workspace_sai_token: str
    :ivar user_storage_container_name:
    :vartype user_storage_container_name: str
    :ivar pipeline_run_id:
    :vartype pipeline_run_id: str
    :ivar root_pipeline_run_id:
    :vartype root_pipeline_run_id: str
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar experiment_id:
    :vartype experiment_id: str
    """

    # Serialization map: Python attribute -> REST JSON key and msrest type token.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'compute_info': {'key': 'computeInfo', 'type': 'ComputeInfo'},
        'description': {'key': 'description', 'type': 'str'},
        'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
        'linked_pipeline_run_id': {'key':
            'linkedPipelineRunId', 'type': 'str'},
        'aks_advance_settings': {'key': 'aksAdvanceSettings', 'type': 'AKSAdvanceSettings'},
        'aci_advance_settings': {'key': 'aciAdvanceSettings', 'type': 'ACIAdvanceSettings'},
        'linked_training_pipeline_run_id': {'key': 'linkedTrainingPipelineRunId', 'type': 'str'},
        'linked_experiment_name': {'key': 'linkedExperimentName', 'type': 'str'},
        'graph_nodes_run_id_mapping': {'key': 'graphNodesRunIdMapping', 'type': '{str}'},
        'workflow': {'key': 'workflow', 'type': 'PipelineGraph'},
        'inputs': {'key': 'inputs', 'type': '[InputOutputPortMetadata]'},
        'outputs': {'key': 'outputs', 'type': '[InputOutputPortMetadata]'},
        'example_request': {'key': 'exampleRequest', 'type': 'ExampleRequest'},
        'user_storage_connection_string': {'key': 'userStorageConnectionString', 'type': 'str'},
        'user_storage_endpoint_uri': {'key': 'userStorageEndpointUri', 'type': 'str'},
        'user_storage_workspace_sai_token': {'key': 'userStorageWorkspaceSaiToken', 'type': 'str'},
        'user_storage_container_name': {'key': 'userStorageContainerName', 'type': 'str'},
        'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
        'root_pipeline_run_id': {'key': 'rootPipelineRunId', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword compute_info:
        :paramtype compute_info: ~flow.models.ComputeInfo
        :keyword description:
        :paramtype description: str
        :keyword linked_pipeline_draft_id:
        :paramtype linked_pipeline_draft_id: str
        :keyword linked_pipeline_run_id:
        :paramtype linked_pipeline_run_id: str
        :keyword aks_advance_settings:
        :paramtype aks_advance_settings: ~flow.models.AKSAdvanceSettings
        :keyword aci_advance_settings:
        :paramtype aci_advance_settings: ~flow.models.ACIAdvanceSettings
        :keyword linked_training_pipeline_run_id:
        :paramtype linked_training_pipeline_run_id: str
        :keyword linked_experiment_name:
        :paramtype linked_experiment_name: str
        :keyword graph_nodes_run_id_mapping: This is a dictionary.
        :paramtype graph_nodes_run_id_mapping: dict[str, str]
        :keyword workflow:
        :paramtype workflow: ~flow.models.PipelineGraph
        :keyword inputs:
        :paramtype inputs: list[~flow.models.InputOutputPortMetadata]
        :keyword outputs:
        :paramtype outputs: list[~flow.models.InputOutputPortMetadata]
        :keyword example_request:
        :paramtype example_request: ~flow.models.ExampleRequest
        :keyword user_storage_connection_string:
        :paramtype user_storage_connection_string: str
        :keyword user_storage_endpoint_uri:
        :paramtype user_storage_endpoint_uri: str
        :keyword user_storage_workspace_sai_token:
        :paramtype user_storage_workspace_sai_token: str
        :keyword user_storage_container_name:
        :paramtype user_storage_container_name: str
        :keyword pipeline_run_id:
        :paramtype pipeline_run_id: str
        :keyword root_pipeline_run_id:
        :paramtype root_pipeline_run_id: str
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword experiment_id:
        :paramtype experiment_id: str
        """
        # Every field is optional; any kwarg not supplied defaults to None.
        super(CreateRealTimeEndpointRequest, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.compute_info = kwargs.get('compute_info', None)
        self.description = kwargs.get('description', None)
        self.linked_pipeline_draft_id = kwargs.get('linked_pipeline_draft_id', None)
        self.linked_pipeline_run_id = kwargs.get('linked_pipeline_run_id', None)
        self.aks_advance_settings = kwargs.get('aks_advance_settings', None)
        self.aci_advance_settings = kwargs.get('aci_advance_settings', None)
        self.linked_training_pipeline_run_id = kwargs.get('linked_training_pipeline_run_id', None)
        self.linked_experiment_name = kwargs.get('linked_experiment_name', None)
        self.graph_nodes_run_id_mapping = kwargs.get('graph_nodes_run_id_mapping', None)
        self.workflow = kwargs.get('workflow', None)
        self.inputs = kwargs.get('inputs', None)
        self.outputs = kwargs.get('outputs', None)
        self.example_request = kwargs.get('example_request', None)
        self.user_storage_connection_string = kwargs.get('user_storage_connection_string', None)
        self.user_storage_endpoint_uri = kwargs.get('user_storage_endpoint_uri', None)
        self.user_storage_workspace_sai_token = kwargs.get('user_storage_workspace_sai_token', None)
        self.user_storage_container_name = kwargs.get('user_storage_container_name', None)
        self.pipeline_run_id = kwargs.get('pipeline_run_id', None)
        self.root_pipeline_run_id = kwargs.get('root_pipeline_run_id', None)
        self.experiment_name = kwargs.get('experiment_name', None)
        self.experiment_id = kwargs.get('experiment_id', None)


class CreationContext(msrest.serialization.Model):
    """CreationContext.

    :ivar created_time:
    :vartype created_time: ~datetime.datetime
    :ivar created_by:
    :vartype created_by: ~flow.models.SchemaContractsCreatedBy
    :ivar creation_source:
    :vartype creation_source: str
    """

    # 'iso-8601' makes msrest (de)serialize created_time as an ISO-8601 datetime string.
    _attribute_map = {
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
        'creation_source': {'key': 'creationSource', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword created_time:
        :paramtype created_time: ~datetime.datetime
        :keyword created_by:
        :paramtype created_by: ~flow.models.SchemaContractsCreatedBy
        :keyword creation_source:
        :paramtype creation_source: str
        """
        # All fields optional; unset kwargs default to None.
        super(CreationContext, self).__init__(**kwargs)
        self.created_time = kwargs.get('created_time', None)
        self.created_by = kwargs.get('created_by', None)
        self.creation_source = kwargs.get('creation_source', None)


class Cron(msrest.serialization.Model):
    """Cron.

    :ivar expression:
    :vartype expression: str
    :ivar end_time:
    :vartype end_time: str
    :ivar start_time:
    :vartype start_time: str
    :ivar time_zone:
    :vartype time_zone: str
    """

    # Note: start/end times are carried as plain strings on the wire, not iso-8601.
    _attribute_map = {
        'expression': {'key': 'expression', 'type': 'str'},
        'end_time': {'key': 'endTime', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'str'},
        'time_zone': {'key': 'timeZone', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword expression:
        :paramtype expression: str
        :keyword end_time:
        :paramtype end_time: str
        :keyword start_time:
        :paramtype start_time: str
        :keyword time_zone:
        :paramtype time_zone: str
        """
        # All fields optional; unset kwargs default to None.
        super(Cron, self).__init__(**kwargs)
        self.expression = kwargs.get('expression', None)
        self.end_time = kwargs.get('end_time', None)
        self.start_time = kwargs.get('start_time', None)
        self.time_zone = kwargs.get('time_zone', None)


class CustomConnectionConfig(msrest.serialization.Model):
    """CustomConnectionConfig.

    :ivar config_value_type: Possible values include: "String", "Secret".
    :vartype config_value_type: str or ~flow.models.ConfigValueType
    :ivar value:
    :vartype value: str
    """

    _attribute_map = {
        'config_value_type': {'key': 'configValueType', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword config_value_type: Possible values include: "String", "Secret".
        :paramtype config_value_type: str or ~flow.models.ConfigValueType
        :keyword value:
        :paramtype value: str
        """
        # All fields optional; unset kwargs default to None.
        super(CustomConnectionConfig, self).__init__(**kwargs)
        self.config_value_type = kwargs.get('config_value_type', None)
        self.value = kwargs.get('value', None)


class CustomReference(msrest.serialization.Model):
    """CustomReference.
:ivar aml_data_store_name: :vartype aml_data_store_name: str :ivar relative_path: :vartype relative_path: str """ _attribute_map = { 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword aml_data_store_name: :paramtype aml_data_store_name: str :keyword relative_path: :paramtype relative_path: str """ super(CustomReference, self).__init__(**kwargs) self.aml_data_store_name = kwargs.get('aml_data_store_name', None) self.relative_path = kwargs.get('relative_path', None) class Data(msrest.serialization.Model): """Data. :ivar data_location: :vartype data_location: ~flow.models.ExecutionDataLocation :ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs". :vartype mechanism: str or ~flow.models.DeliveryMechanism :ivar environment_variable_name: :vartype environment_variable_name: str :ivar path_on_compute: :vartype path_on_compute: str :ivar overwrite: :vartype overwrite: bool :ivar options: Dictionary of :code:`<string>`. :vartype options: dict[str, str] """ _attribute_map = { 'data_location': {'key': 'dataLocation', 'type': 'ExecutionDataLocation'}, 'mechanism': {'key': 'mechanism', 'type': 'str'}, 'environment_variable_name': {'key': 'environmentVariableName', 'type': 'str'}, 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, 'overwrite': {'key': 'overwrite', 'type': 'bool'}, 'options': {'key': 'options', 'type': '{str}'}, } def __init__( self, **kwargs ): """ :keyword data_location: :paramtype data_location: ~flow.models.ExecutionDataLocation :keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs". :paramtype mechanism: str or ~flow.models.DeliveryMechanism :keyword environment_variable_name: :paramtype environment_variable_name: str :keyword path_on_compute: :paramtype path_on_compute: str :keyword overwrite: :paramtype overwrite: bool :keyword options: Dictionary of :code:`<string>`. 
:paramtype options: dict[str, str] """ super(Data, self).__init__(**kwargs) self.data_location = kwargs.get('data_location', None) self.mechanism = kwargs.get('mechanism', None) self.environment_variable_name = kwargs.get('environment_variable_name', None) self.path_on_compute = kwargs.get('path_on_compute', None) self.overwrite = kwargs.get('overwrite', None) self.options = kwargs.get('options', None) class DatabaseSink(msrest.serialization.Model): """DatabaseSink. :ivar connection: :vartype connection: str :ivar table: :vartype table: str """ _attribute_map = { 'connection': {'key': 'connection', 'type': 'str'}, 'table': {'key': 'table', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword connection: :paramtype connection: str :keyword table: :paramtype table: str """ super(DatabaseSink, self).__init__(**kwargs) self.connection = kwargs.get('connection', None) self.table = kwargs.get('table', None) class DatabaseSource(msrest.serialization.Model): """DatabaseSource. :ivar connection: :vartype connection: str :ivar query: :vartype query: str :ivar stored_procedure_name: :vartype stored_procedure_name: str :ivar stored_procedure_parameters: :vartype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter] """ _attribute_map = { 'connection': {'key': 'connection', 'type': 'str'}, 'query': {'key': 'query', 'type': 'str'}, 'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[StoredProcedureParameter]'}, } def __init__( self, **kwargs ): """ :keyword connection: :paramtype connection: str :keyword query: :paramtype query: str :keyword stored_procedure_name: :paramtype stored_procedure_name: str :keyword stored_procedure_parameters: :paramtype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter] """ super(DatabaseSource, self).__init__(**kwargs) self.connection = kwargs.get('connection', None) self.query = kwargs.get('query', 
            None)
        self.stored_procedure_name = kwargs.get('stored_procedure_name', None)
        self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)


# NOTE(review): autogenerated AutoRest/msrest model classes (see file header);
# hand edits are lost on regeneration — change the swagger/generator instead.
class DatabricksComputeInfo(msrest.serialization.Model):
    """DatabricksComputeInfo.

    :ivar existing_cluster_id:
    :vartype existing_cluster_id: str
    """

    # Maps python attribute name -> REST wire key and msrest type string.
    _attribute_map = {
        'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword existing_cluster_id:
        :paramtype existing_cluster_id: str
        """
        super(DatabricksComputeInfo, self).__init__(**kwargs)
        self.existing_cluster_id = kwargs.get('existing_cluster_id', None)


class DatabricksConfiguration(msrest.serialization.Model):
    """DatabricksConfiguration.

    :ivar workers:
    :vartype workers: int
    :ivar minimum_worker_count:
    :vartype minimum_worker_count: int
    :ivar max_mum_worker_count:
    :vartype max_mum_worker_count: int
    :ivar spark_version:
    :vartype spark_version: str
    :ivar node_type_id:
    :vartype node_type_id: str
    :ivar spark_conf: Dictionary of :code:`<string>`.
    :vartype spark_conf: dict[str, str]
    :ivar spark_env_vars: Dictionary of :code:`<string>`.
    :vartype spark_env_vars: dict[str, str]
    :ivar cluster_log_conf_dbfs_path:
    :vartype cluster_log_conf_dbfs_path: str
    :ivar dbfs_init_scripts:
    :vartype dbfs_init_scripts: list[~flow.models.InitScriptInfoDto]
    :ivar instance_pool_id:
    :vartype instance_pool_id: str
    :ivar timeout_seconds:
    :vartype timeout_seconds: int
    :ivar notebook_task:
    :vartype notebook_task: ~flow.models.NoteBookTaskDto
    :ivar spark_python_task:
    :vartype spark_python_task: ~flow.models.SparkPythonTaskDto
    :ivar spark_jar_task:
    :vartype spark_jar_task: ~flow.models.SparkJarTaskDto
    :ivar spark_submit_task:
    :vartype spark_submit_task: ~flow.models.SparkSubmitTaskDto
    :ivar jar_libraries:
    :vartype jar_libraries: list[str]
    :ivar egg_libraries:
    :vartype egg_libraries: list[str]
    :ivar whl_libraries:
    :vartype whl_libraries: list[str]
    :ivar pypi_libraries:
    :vartype pypi_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
    :ivar r_cran_libraries:
    :vartype r_cran_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
    :ivar maven_libraries:
    :vartype maven_libraries: list[~flow.models.MavenLibraryDto]
    :ivar libraries:
    :vartype libraries: list[any]
    :ivar linked_adb_workspace_metadata:
    :vartype linked_adb_workspace_metadata: ~flow.models.LinkedADBWorkspaceMetadata
    :ivar databrick_resource_id:
    :vartype databrick_resource_id: str
    :ivar auto_scale:
    :vartype auto_scale: bool
    """

    _attribute_map = {
        'workers': {'key': 'workers', 'type': 'int'},
        'minimum_worker_count': {'key': 'minimumWorkerCount', 'type': 'int'},
        'max_mum_worker_count': {'key': 'maxMumWorkerCount', 'type': 'int'},
        'spark_version': {'key': 'sparkVersion', 'type': 'str'},
        'node_type_id': {'key': 'nodeTypeId', 'type': 'str'},
        'spark_conf': {'key': 'sparkConf', 'type': '{str}'},
        'spark_env_vars': {'key': 'sparkEnvVars', 'type': '{str}'},
        'cluster_log_conf_dbfs_path': {'key': 'clusterLogConfDbfsPath', 'type': 'str'},
        'dbfs_init_scripts': {'key': 'dbfsInitScripts', 'type': '[InitScriptInfoDto]'},
        'instance_pool_id': {'key': 'instancePoolId', 'type': 'str'},
        'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
        'notebook_task': {'key': 'notebookTask', 'type': 'NoteBookTaskDto'},
        'spark_python_task': {'key': 'sparkPythonTask', 'type': 'SparkPythonTaskDto'},
        'spark_jar_task': {'key': 'sparkJarTask', 'type': 'SparkJarTaskDto'},
        'spark_submit_task': {'key': 'sparkSubmitTask', 'type': 'SparkSubmitTaskDto'},
        'jar_libraries': {'key': 'jarLibraries', 'type': '[str]'},
        'egg_libraries': {'key': 'eggLibraries', 'type': '[str]'},
        'whl_libraries': {'key': 'whlLibraries', 'type': '[str]'},
        'pypi_libraries': {'key': 'pypiLibraries', 'type': '[PythonPyPiOrRCranLibraryDto]'},
        'r_cran_libraries': {'key': 'rCranLibraries', 'type': '[PythonPyPiOrRCranLibraryDto]'},
        'maven_libraries': {'key': 'mavenLibraries', 'type': '[MavenLibraryDto]'},
        'libraries': {'key': 'libraries', 'type': '[object]'},
        'linked_adb_workspace_metadata': {'key': 'linkedADBWorkspaceMetadata', 'type': 'LinkedADBWorkspaceMetadata'},
        'databrick_resource_id': {'key': 'databrickResourceId', 'type': 'str'},
        'auto_scale': {'key': 'autoScale', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword workers:
        :paramtype workers: int
        :keyword minimum_worker_count:
        :paramtype minimum_worker_count: int
        :keyword max_mum_worker_count:
        :paramtype max_mum_worker_count: int
        :keyword spark_version:
        :paramtype spark_version: str
        :keyword node_type_id:
        :paramtype node_type_id: str
        :keyword spark_conf: Dictionary of :code:`<string>`.
        :paramtype spark_conf: dict[str, str]
        :keyword spark_env_vars: Dictionary of :code:`<string>`.
        :paramtype spark_env_vars: dict[str, str]
        :keyword cluster_log_conf_dbfs_path:
        :paramtype cluster_log_conf_dbfs_path: str
        :keyword dbfs_init_scripts:
        :paramtype dbfs_init_scripts: list[~flow.models.InitScriptInfoDto]
        :keyword instance_pool_id:
        :paramtype instance_pool_id: str
        :keyword timeout_seconds:
        :paramtype timeout_seconds: int
        :keyword notebook_task:
        :paramtype notebook_task: ~flow.models.NoteBookTaskDto
        :keyword spark_python_task:
        :paramtype spark_python_task: ~flow.models.SparkPythonTaskDto
        :keyword spark_jar_task:
        :paramtype spark_jar_task: ~flow.models.SparkJarTaskDto
        :keyword spark_submit_task:
        :paramtype spark_submit_task: ~flow.models.SparkSubmitTaskDto
        :keyword jar_libraries:
        :paramtype jar_libraries: list[str]
        :keyword egg_libraries:
        :paramtype egg_libraries: list[str]
        :keyword whl_libraries:
        :paramtype whl_libraries: list[str]
        :keyword pypi_libraries:
        :paramtype pypi_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
        :keyword r_cran_libraries:
        :paramtype r_cran_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
        :keyword maven_libraries:
        :paramtype maven_libraries: list[~flow.models.MavenLibraryDto]
        :keyword libraries:
        :paramtype libraries: list[any]
        :keyword linked_adb_workspace_metadata:
        :paramtype linked_adb_workspace_metadata: ~flow.models.LinkedADBWorkspaceMetadata
        :keyword databrick_resource_id:
        :paramtype databrick_resource_id: str
        :keyword auto_scale:
        :paramtype auto_scale: bool
        """
        super(DatabricksConfiguration, self).__init__(**kwargs)
        self.workers = kwargs.get('workers', None)
        self.minimum_worker_count = kwargs.get('minimum_worker_count', None)
        self.max_mum_worker_count = kwargs.get('max_mum_worker_count', None)
        self.spark_version = kwargs.get('spark_version', None)
        self.node_type_id = kwargs.get('node_type_id', None)
        self.spark_conf = kwargs.get('spark_conf', None)
        self.spark_env_vars = kwargs.get('spark_env_vars', None)
        self.cluster_log_conf_dbfs_path = kwargs.get('cluster_log_conf_dbfs_path', None)
        self.dbfs_init_scripts = kwargs.get('dbfs_init_scripts', None)
        self.instance_pool_id = kwargs.get('instance_pool_id', None)
        self.timeout_seconds = kwargs.get('timeout_seconds', None)
        self.notebook_task = kwargs.get('notebook_task', None)
        self.spark_python_task = kwargs.get('spark_python_task', None)
        self.spark_jar_task = kwargs.get('spark_jar_task', None)
        self.spark_submit_task = kwargs.get('spark_submit_task', None)
        self.jar_libraries = kwargs.get('jar_libraries', None)
        self.egg_libraries = kwargs.get('egg_libraries', None)
        self.whl_libraries = kwargs.get('whl_libraries', None)
        self.pypi_libraries = kwargs.get('pypi_libraries', None)
        self.r_cran_libraries = kwargs.get('r_cran_libraries', None)
        self.maven_libraries = kwargs.get('maven_libraries', None)
        self.libraries = kwargs.get('libraries', None)
        self.linked_adb_workspace_metadata = kwargs.get('linked_adb_workspace_metadata', None)
        self.databrick_resource_id = kwargs.get('databrick_resource_id', None)
        self.auto_scale = kwargs.get('auto_scale', None)


class DatacacheConfiguration(msrest.serialization.Model):
    """DatacacheConfiguration.

    :ivar datacache_id:
    :vartype datacache_id: str
    :ivar datacache_store:
    :vartype datacache_store: str
    :ivar dataset_id:
    :vartype dataset_id: str
    :ivar mode: The only acceptable values to pass in are None and "Mount". The default value is
     None.
    :vartype mode: str
    :ivar replica:
    :vartype replica: int
    :ivar failure_fallback:
    :vartype failure_fallback: bool
    :ivar path_on_compute:
    :vartype path_on_compute: str
    """

    # Maps python attribute name -> REST wire key and msrest type string.
    _attribute_map = {
        'datacache_id': {'key': 'datacacheId', 'type': 'str'},
        'datacache_store': {'key': 'datacacheStore', 'type': 'str'},
        'dataset_id': {'key': 'datasetId', 'type': 'str'},
        'mode': {'key': 'mode', 'type': 'str'},
        'replica': {'key': 'replica', 'type': 'int'},
        'failure_fallback': {'key': 'failureFallback', 'type': 'bool'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword datacache_id:
        :paramtype datacache_id: str
        :keyword datacache_store:
        :paramtype datacache_store: str
        :keyword dataset_id:
        :paramtype dataset_id: str
        :keyword mode: The only acceptable values to pass in are None and "Mount". The default
         value is None.
        :paramtype mode: str
        :keyword replica:
        :paramtype replica: int
        :keyword failure_fallback:
        :paramtype failure_fallback: bool
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        """
        super(DatacacheConfiguration, self).__init__(**kwargs)
        self.datacache_id = kwargs.get('datacache_id', None)
        self.datacache_store = kwargs.get('datacache_store', None)
        self.dataset_id = kwargs.get('dataset_id', None)
        self.mode = kwargs.get('mode', None)
        self.replica = kwargs.get('replica', None)
        self.failure_fallback = kwargs.get('failure_fallback', None)
        self.path_on_compute = kwargs.get('path_on_compute', None)


# NOTE(review): autogenerated AutoRest/msrest model classes (see file header);
# hand edits are lost on regeneration — change the swagger/generator instead.
class DataInfo(msrest.serialization.Model):
    """DataInfo.

    :ivar feed_name:
    :vartype feed_name: str
    :ivar id:
    :vartype id: str
    :ivar data_source_type: Possible values include: "None", "PipelineDataSource", "AmlDataset",
     "GlobalDataset", "FeedModel", "FeedDataset", "AmlDataVersion", "AMLModelVersion".
    :vartype data_source_type: str or ~flow.models.DataSourceType
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar data_type_id:
    :vartype data_type_id: str
    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar modified_date:
    :vartype modified_date: ~datetime.datetime
    :ivar registered_by:
    :vartype registered_by: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar created_by_studio:
    :vartype created_by_studio: bool
    :ivar data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
     "AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
     "AzureMySqlDatabase", "Custom", "Hdfs".
    :vartype data_reference_type: str or ~flow.models.DataReferenceType
    :ivar dataset_type:
    :vartype dataset_type: str
    :ivar saved_dataset_id:
    :vartype saved_dataset_id: str
    :ivar dataset_version_id:
    :vartype dataset_version_id: str
    :ivar is_visible:
    :vartype is_visible: bool
    :ivar is_registered:
    :vartype is_registered: bool
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, any]
    :ivar connection_string:
    :vartype connection_string: str
    :ivar container_name:
    :vartype container_name: str
    :ivar data_storage_endpoint_uri:
    :vartype data_storage_endpoint_uri: str
    :ivar workspace_sai_token:
    :vartype workspace_sai_token: str
    :ivar aml_dataset_data_flow:
    :vartype aml_dataset_data_flow: str
    :ivar system_data:
    :vartype system_data: ~flow.models.SystemData
    :ivar arm_id:
    :vartype arm_id: str
    :ivar asset_id:
    :vartype asset_id: str
    :ivar asset_uri:
    :vartype asset_uri: str
    :ivar asset_type:
    :vartype asset_type: str
    :ivar is_data_v2:
    :vartype is_data_v2: bool
    :ivar asset_scope_type: Possible values include: "Workspace", "Global", "All", "Feed".
    :vartype asset_scope_type: str or ~flow.models.AssetScopeTypes
    :ivar pipeline_run_id:
    :vartype pipeline_run_id: str
    :ivar module_node_id:
    :vartype module_node_id: str
    :ivar output_port_name:
    :vartype output_port_name: str
    """

    _attribute_map = {
        'feed_name': {'key': 'feedName', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'modified_date': {'key': 'modifiedDate', 'type': 'iso-8601'},
        'registered_by': {'key': 'registeredBy', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'created_by_studio': {'key': 'createdByStudio', 'type': 'bool'},
        'data_reference_type': {'key': 'dataReferenceType', 'type': 'str'},
        'dataset_type': {'key': 'datasetType', 'type': 'str'},
        'saved_dataset_id': {'key': 'savedDatasetId', 'type': 'str'},
        'dataset_version_id': {'key': 'datasetVersionId', 'type': 'str'},
        'is_visible': {'key': 'isVisible', 'type': 'bool'},
        'is_registered': {'key': 'isRegistered', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': '{object}'},
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'container_name': {'key': 'containerName', 'type': 'str'},
        'data_storage_endpoint_uri': {'key': 'dataStorageEndpointUri', 'type': 'str'},
        'workspace_sai_token': {'key': 'workspaceSaiToken', 'type': 'str'},
        'aml_dataset_data_flow': {'key': 'amlDatasetDataFlow', 'type': 'str'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'arm_id': {'key': 'armId', 'type': 'str'},
        'asset_id': {'key': 'assetId', 'type': 'str'},
        'asset_uri': {'key': 'assetUri', 'type': 'str'},
        'asset_type': {'key': 'assetType', 'type': 'str'},
        'is_data_v2': {'key': 'isDataV2', 'type': 'bool'},
        'asset_scope_type': {'key': 'assetScopeType', 'type': 'str'},
        'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
        'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
        'output_port_name': {'key': 'outputPortName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword feed_name:
        :paramtype feed_name: str
        :keyword id:
        :paramtype id: str
        :keyword data_source_type: Possible values include: "None", "PipelineDataSource",
         "AmlDataset", "GlobalDataset", "FeedModel", "FeedDataset", "AmlDataVersion",
         "AMLModelVersion".
        :paramtype data_source_type: str or ~flow.models.DataSourceType
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword data_type_id:
        :paramtype data_type_id: str
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword modified_date:
        :paramtype modified_date: ~datetime.datetime
        :keyword registered_by:
        :paramtype registered_by: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword created_by_studio:
        :paramtype created_by_studio: bool
        :keyword data_reference_type: Possible values include: "None", "AzureBlob",
         "AzureDataLake", "AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase",
         "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase", "Custom", "Hdfs".
        :paramtype data_reference_type: str or ~flow.models.DataReferenceType
        :keyword dataset_type:
        :paramtype dataset_type: str
        :keyword saved_dataset_id:
        :paramtype saved_dataset_id: str
        :keyword dataset_version_id:
        :paramtype dataset_version_id: str
        :keyword is_visible:
        :paramtype is_visible: bool
        :keyword is_registered:
        :paramtype is_registered: bool
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, any]
        :keyword connection_string:
        :paramtype connection_string: str
        :keyword container_name:
        :paramtype container_name: str
        :keyword data_storage_endpoint_uri:
        :paramtype data_storage_endpoint_uri: str
        :keyword workspace_sai_token:
        :paramtype workspace_sai_token: str
        :keyword aml_dataset_data_flow:
        :paramtype aml_dataset_data_flow: str
        :keyword system_data:
        :paramtype system_data: ~flow.models.SystemData
        :keyword arm_id:
        :paramtype arm_id: str
        :keyword asset_id:
        :paramtype asset_id: str
        :keyword asset_uri:
        :paramtype asset_uri: str
        :keyword asset_type:
        :paramtype asset_type: str
        :keyword is_data_v2:
        :paramtype is_data_v2: bool
        :keyword asset_scope_type: Possible values include: "Workspace", "Global", "All", "Feed".
        :paramtype asset_scope_type: str or ~flow.models.AssetScopeTypes
        :keyword pipeline_run_id:
        :paramtype pipeline_run_id: str
        :keyword module_node_id:
        :paramtype module_node_id: str
        :keyword output_port_name:
        :paramtype output_port_name: str
        """
        super(DataInfo, self).__init__(**kwargs)
        self.feed_name = kwargs.get('feed_name', None)
        self.id = kwargs.get('id', None)
        self.data_source_type = kwargs.get('data_source_type', None)
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.data_type_id = kwargs.get('data_type_id', None)
        self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
        self.relative_path = kwargs.get('relative_path', None)
        self.created_date = kwargs.get('created_date', None)
        self.modified_date = kwargs.get('modified_date', None)
        self.registered_by = kwargs.get('registered_by', None)
        self.tags = kwargs.get('tags', None)
        self.created_by_studio = kwargs.get('created_by_studio', None)
        self.data_reference_type = kwargs.get('data_reference_type', None)
        self.dataset_type = kwargs.get('dataset_type', None)
        self.saved_dataset_id = kwargs.get('saved_dataset_id', None)
        self.dataset_version_id = kwargs.get('dataset_version_id', None)
        self.is_visible = kwargs.get('is_visible', None)
        self.is_registered = kwargs.get('is_registered', None)
        self.properties = kwargs.get('properties', None)
        self.connection_string = kwargs.get('connection_string', None)
        self.container_name = kwargs.get('container_name', None)
        self.data_storage_endpoint_uri = kwargs.get('data_storage_endpoint_uri', None)
        self.workspace_sai_token = kwargs.get('workspace_sai_token', None)
        self.aml_dataset_data_flow = kwargs.get('aml_dataset_data_flow', None)
        self.system_data = kwargs.get('system_data', None)
        self.arm_id = kwargs.get('arm_id', None)
        self.asset_id = kwargs.get('asset_id', None)
        self.asset_uri = kwargs.get('asset_uri', None)
        self.asset_type = kwargs.get('asset_type', None)
        self.is_data_v2 = kwargs.get('is_data_v2', None)
        self.asset_scope_type = kwargs.get('asset_scope_type', None)
        self.pipeline_run_id = kwargs.get('pipeline_run_id', None)
        self.module_node_id = kwargs.get('module_node_id', None)
        self.output_port_name = kwargs.get('output_port_name', None)


class DataLocation(msrest.serialization.Model):
    """DataLocation.

    :ivar storage_type: Possible values include: "None", "AzureBlob", "Artifact", "Snapshot",
     "SavedAmlDataset", "Asset".
    :vartype storage_type: str or ~flow.models.DataLocationStorageType
    :ivar storage_id:
    :vartype storage_id: str
    :ivar uri:
    :vartype uri: str
    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar data_reference:
    :vartype data_reference: ~flow.models.DataReference
    :ivar aml_dataset:
    :vartype aml_dataset: ~flow.models.AmlDataset
    :ivar asset_definition:
    :vartype asset_definition: ~flow.models.AssetDefinition
    """

    # Maps python attribute name -> REST wire key and msrest type string,
    # consumed by msrest.serialization.Model during (de)serialization.
    _attribute_map = {
        'storage_type': {'key': 'storageType', 'type': 'str'},
        'storage_id': {'key': 'storageId', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'data_reference': {'key': 'dataReference', 'type': 'DataReference'},
        'aml_dataset': {'key': 'amlDataset', 'type': 'AmlDataset'},
        'asset_definition': {'key': 'assetDefinition', 'type': 'AssetDefinition'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword storage_type: Possible values include: "None", "AzureBlob", "Artifact",
         "Snapshot", "SavedAmlDataset", "Asset".
        :paramtype storage_type: str or ~flow.models.DataLocationStorageType
        :keyword storage_id:
        :paramtype storage_id: str
        :keyword uri:
        :paramtype uri: str
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword data_reference:
        :paramtype data_reference: ~flow.models.DataReference
        :keyword aml_dataset:
        :paramtype aml_dataset: ~flow.models.AmlDataset
        :keyword asset_definition:
        :paramtype asset_definition: ~flow.models.AssetDefinition
        """
        super(DataLocation, self).__init__(**kwargs)
        self.storage_type = kwargs.get('storage_type', None)
        self.storage_id = kwargs.get('storage_id', None)
        self.uri = kwargs.get('uri', None)
        self.data_store_name = kwargs.get('data_store_name', None)
        self.data_reference = kwargs.get('data_reference', None)
        self.aml_dataset = kwargs.get('aml_dataset', None)
        self.asset_definition = kwargs.get('asset_definition', None)


class DataPath(msrest.serialization.Model):
    """DataPath.

    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar sql_data_path:
    :vartype sql_data_path: ~flow.models.SqlDataPath
    """

    _attribute_map = {
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'sql_data_path': {'key': 'sqlDataPath', 'type': 'SqlDataPath'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword sql_data_path:
        :paramtype sql_data_path: ~flow.models.SqlDataPath
        """
        super(DataPath, self).__init__(**kwargs)
        self.data_store_name = kwargs.get('data_store_name', None)
        self.relative_path = kwargs.get('relative_path', None)
        self.sql_data_path = kwargs.get('sql_data_path', None)


class DataPathParameter(msrest.serialization.Model):
    """DataPathParameter.

    :ivar name:
    :vartype name: str
    :ivar documentation:
    :vartype documentation: str
    :ivar default_value:
    :vartype default_value: ~flow.models.LegacyDataPath
    :ivar is_optional:
    :vartype is_optional: bool
    :ivar data_type_id:
    :vartype data_type_id: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'documentation': {'key': 'documentation', 'type': 'str'},
        'default_value': {'key': 'defaultValue', 'type': 'LegacyDataPath'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
        'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword documentation:
        :paramtype documentation: str
        :keyword default_value:
        :paramtype default_value: ~flow.models.LegacyDataPath
        :keyword is_optional:
        :paramtype is_optional: bool
        :keyword data_type_id:
        :paramtype data_type_id: str
        """
        super(DataPathParameter, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.documentation = kwargs.get('documentation', None)
        self.default_value = kwargs.get('default_value', None)
        self.is_optional = kwargs.get('is_optional', None)
        self.data_type_id = kwargs.get('data_type_id', None)


class DataPortDto(msrest.serialization.Model):
    """DataPortDto.

    :ivar data_port_type: Possible values include: "Input", "Output".
    :vartype data_port_type: str or ~flow.models.DataPortType
    :ivar data_port_name:
    :vartype data_port_name: str
    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar data_store_intellectual_property_access_mode: Possible values include: "ReadOnly",
     "ReadWrite".
    :vartype data_store_intellectual_property_access_mode: str or
     ~flow.models.IntellectualPropertyAccessMode
    :ivar data_store_intellectual_property_publisher:
    :vartype data_store_intellectual_property_publisher: str
    """

    _attribute_map = {
        'data_port_type': {'key': 'dataPortType', 'type': 'str'},
        'data_port_name': {'key': 'dataPortName', 'type': 'str'},
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'data_store_intellectual_property_access_mode': {'key': 'dataStoreIntellectualPropertyAccessMode', 'type': 'str'},
        'data_store_intellectual_property_publisher': {'key': 'dataStoreIntellectualPropertyPublisher', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_port_type: Possible values include: "Input", "Output".
        :paramtype data_port_type: str or ~flow.models.DataPortType
        :keyword data_port_name:
        :paramtype data_port_name: str
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword data_store_intellectual_property_access_mode: Possible values include: "ReadOnly",
         "ReadWrite".
        :paramtype data_store_intellectual_property_access_mode: str or
         ~flow.models.IntellectualPropertyAccessMode
        :keyword data_store_intellectual_property_publisher:
        :paramtype data_store_intellectual_property_publisher: str
        """
        super(DataPortDto, self).__init__(**kwargs)
        self.data_port_type = kwargs.get('data_port_type', None)
        self.data_port_name = kwargs.get('data_port_name', None)
        self.data_store_name = kwargs.get('data_store_name', None)
        self.data_store_intellectual_property_access_mode = kwargs.get('data_store_intellectual_property_access_mode', None)
        self.data_store_intellectual_property_publisher = kwargs.get('data_store_intellectual_property_publisher', None)


class DataReference(msrest.serialization.Model):
    """DataReference.

    :ivar type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles",
     "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
     "AzureMySqlDatabase", "Custom", "Hdfs".
    :vartype type: str or ~flow.models.DataReferenceType
    :ivar azure_blob_reference:
    :vartype azure_blob_reference: ~flow.models.AzureBlobReference
    :ivar azure_data_lake_reference:
    :vartype azure_data_lake_reference: ~flow.models.AzureDataLakeReference
    :ivar azure_files_reference:
    :vartype azure_files_reference: ~flow.models.AzureFilesReference
    :ivar azure_sql_database_reference:
    :vartype azure_sql_database_reference: ~flow.models.AzureDatabaseReference
    :ivar azure_postgres_database_reference:
    :vartype azure_postgres_database_reference: ~flow.models.AzureDatabaseReference
    :ivar azure_data_lake_gen2_reference:
    :vartype azure_data_lake_gen2_reference: ~flow.models.AzureDataLakeGen2Reference
    :ivar dbfs_reference:
    :vartype dbfs_reference: ~flow.models.DBFSReference
    :ivar azure_my_sql_database_reference:
    :vartype azure_my_sql_database_reference: ~flow.models.AzureDatabaseReference
    :ivar custom_reference:
    :vartype custom_reference: ~flow.models.CustomReference
    :ivar hdfs_reference:
    :vartype hdfs_reference: ~flow.models.HdfsReference
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'azure_blob_reference': {'key': 'azureBlobReference', 'type': 'AzureBlobReference'},
        'azure_data_lake_reference': {'key': 'azureDataLakeReference', 'type': 'AzureDataLakeReference'},
        'azure_files_reference': {'key': 'azureFilesReference', 'type': 'AzureFilesReference'},
        'azure_sql_database_reference': {'key': 'azureSqlDatabaseReference', 'type': 'AzureDatabaseReference'},
        'azure_postgres_database_reference': {'key': 'azurePostgresDatabaseReference', 'type': 'AzureDatabaseReference'},
        'azure_data_lake_gen2_reference': {'key': 'azureDataLakeGen2Reference', 'type': 'AzureDataLakeGen2Reference'},
        'dbfs_reference': {'key': 'dbfsReference', 'type': 'DBFSReference'},
        'azure_my_sql_database_reference': {'key': 'azureMySqlDatabaseReference', 'type': 'AzureDatabaseReference'},
        'custom_reference': {'key': 'customReference', 'type': 'CustomReference'},
        'hdfs_reference': {'key': 'hdfsReference', 'type': 'HdfsReference'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles",
         "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
         "AzureMySqlDatabase", "Custom", "Hdfs".
        :paramtype type: str or ~flow.models.DataReferenceType
        :keyword azure_blob_reference:
        :paramtype azure_blob_reference: ~flow.models.AzureBlobReference
        :keyword azure_data_lake_reference:
        :paramtype azure_data_lake_reference: ~flow.models.AzureDataLakeReference
        :keyword azure_files_reference:
        :paramtype azure_files_reference: ~flow.models.AzureFilesReference
        :keyword azure_sql_database_reference:
        :paramtype azure_sql_database_reference: ~flow.models.AzureDatabaseReference
        :keyword azure_postgres_database_reference:
        :paramtype azure_postgres_database_reference: ~flow.models.AzureDatabaseReference
        :keyword azure_data_lake_gen2_reference:
        :paramtype azure_data_lake_gen2_reference: ~flow.models.AzureDataLakeGen2Reference
        :keyword dbfs_reference:
        :paramtype dbfs_reference: ~flow.models.DBFSReference
        :keyword azure_my_sql_database_reference:
        :paramtype azure_my_sql_database_reference: ~flow.models.AzureDatabaseReference
        :keyword custom_reference:
        :paramtype custom_reference: ~flow.models.CustomReference
        :keyword hdfs_reference:
        :paramtype hdfs_reference: ~flow.models.HdfsReference
        """
        super(DataReference, self).__init__(**kwargs)
        self.type = kwargs.get('type', None)
        self.azure_blob_reference = kwargs.get('azure_blob_reference', None)
        self.azure_data_lake_reference = kwargs.get('azure_data_lake_reference', None)
        self.azure_files_reference = kwargs.get('azure_files_reference', None)
        self.azure_sql_database_reference = kwargs.get('azure_sql_database_reference', None)
        self.azure_postgres_database_reference = kwargs.get('azure_postgres_database_reference', None)
        self.azure_data_lake_gen2_reference = kwargs.get('azure_data_lake_gen2_reference', None)
        self.dbfs_reference = kwargs.get('dbfs_reference', None)
        self.azure_my_sql_database_reference = kwargs.get('azure_my_sql_database_reference', None)
        self.custom_reference = kwargs.get('custom_reference', None)
        self.hdfs_reference = kwargs.get('hdfs_reference', None)


class DataReferenceConfiguration(msrest.serialization.Model):
"""DataReferenceConfiguration. :ivar data_store_name: :vartype data_store_name: str :ivar mode: Possible values include: "Mount", "Download", "Upload". :vartype mode: str or ~flow.models.DataStoreMode :ivar path_on_data_store: :vartype path_on_data_store: str :ivar path_on_compute: :vartype path_on_compute: str :ivar overwrite: :vartype overwrite: bool """ _attribute_map = { 'data_store_name': {'key': 'dataStoreName', 'type': 'str'}, 'mode': {'key': 'mode', 'type': 'str'}, 'path_on_data_store': {'key': 'pathOnDataStore', 'type': 'str'}, 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, 'overwrite': {'key': 'overwrite', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword data_store_name: :paramtype data_store_name: str :keyword mode: Possible values include: "Mount", "Download", "Upload". :paramtype mode: str or ~flow.models.DataStoreMode :keyword path_on_data_store: :paramtype path_on_data_store: str :keyword path_on_compute: :paramtype path_on_compute: str :keyword overwrite: :paramtype overwrite: bool """ super(DataReferenceConfiguration, self).__init__(**kwargs) self.data_store_name = kwargs.get('data_store_name', None) self.mode = kwargs.get('mode', None) self.path_on_data_store = kwargs.get('path_on_data_store', None) self.path_on_compute = kwargs.get('path_on_compute', None) self.overwrite = kwargs.get('overwrite', None) class DataSetDefinition(msrest.serialization.Model): """DataSetDefinition. 
:ivar data_type_short_name: :vartype data_type_short_name: str :ivar parameter_name: :vartype parameter_name: str :ivar value: :vartype value: ~flow.models.DataSetDefinitionValue """ _attribute_map = { 'data_type_short_name': {'key': 'dataTypeShortName', 'type': 'str'}, 'parameter_name': {'key': 'parameterName', 'type': 'str'}, 'value': {'key': 'value', 'type': 'DataSetDefinitionValue'}, } def __init__( self, **kwargs ): """ :keyword data_type_short_name: :paramtype data_type_short_name: str :keyword parameter_name: :paramtype parameter_name: str :keyword value: :paramtype value: ~flow.models.DataSetDefinitionValue """ super(DataSetDefinition, self).__init__(**kwargs) self.data_type_short_name = kwargs.get('data_type_short_name', None) self.parameter_name = kwargs.get('parameter_name', None) self.value = kwargs.get('value', None) class DataSetDefinitionValue(msrest.serialization.Model): """DataSetDefinitionValue. :ivar literal_value: :vartype literal_value: ~flow.models.DataPath :ivar data_set_reference: :vartype data_set_reference: ~flow.models.RegisteredDataSetReference :ivar saved_data_set_reference: :vartype saved_data_set_reference: ~flow.models.SavedDataSetReference :ivar asset_definition: :vartype asset_definition: ~flow.models.AssetDefinition """ _attribute_map = { 'literal_value': {'key': 'literalValue', 'type': 'DataPath'}, 'data_set_reference': {'key': 'dataSetReference', 'type': 'RegisteredDataSetReference'}, 'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'SavedDataSetReference'}, 'asset_definition': {'key': 'assetDefinition', 'type': 'AssetDefinition'}, } def __init__( self, **kwargs ): """ :keyword literal_value: :paramtype literal_value: ~flow.models.DataPath :keyword data_set_reference: :paramtype data_set_reference: ~flow.models.RegisteredDataSetReference :keyword saved_data_set_reference: :paramtype saved_data_set_reference: ~flow.models.SavedDataSetReference :keyword asset_definition: :paramtype asset_definition: 
~flow.models.AssetDefinition """ super(DataSetDefinitionValue, self).__init__(**kwargs) self.literal_value = kwargs.get('literal_value', None) self.data_set_reference = kwargs.get('data_set_reference', None) self.saved_data_set_reference = kwargs.get('saved_data_set_reference', None) self.asset_definition = kwargs.get('asset_definition', None) class DatasetIdentifier(msrest.serialization.Model): """DatasetIdentifier. :ivar saved_id: :vartype saved_id: str :ivar registered_id: :vartype registered_id: str :ivar registered_version: :vartype registered_version: str """ _attribute_map = { 'saved_id': {'key': 'savedId', 'type': 'str'}, 'registered_id': {'key': 'registeredId', 'type': 'str'}, 'registered_version': {'key': 'registeredVersion', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword saved_id: :paramtype saved_id: str :keyword registered_id: :paramtype registered_id: str :keyword registered_version: :paramtype registered_version: str """ super(DatasetIdentifier, self).__init__(**kwargs) self.saved_id = kwargs.get('saved_id', None) self.registered_id = kwargs.get('registered_id', None) self.registered_version = kwargs.get('registered_version', None) class DatasetInputDetails(msrest.serialization.Model): """DatasetInputDetails. :ivar input_name: :vartype input_name: str :ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs". :vartype mechanism: str or ~flow.models.DatasetDeliveryMechanism :ivar path_on_compute: :vartype path_on_compute: str """ _attribute_map = { 'input_name': {'key': 'inputName', 'type': 'str'}, 'mechanism': {'key': 'mechanism', 'type': 'str'}, 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword input_name: :paramtype input_name: str :keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs". 
:paramtype mechanism: str or ~flow.models.DatasetDeliveryMechanism :keyword path_on_compute: :paramtype path_on_compute: str """ super(DatasetInputDetails, self).__init__(**kwargs) self.input_name = kwargs.get('input_name', None) self.mechanism = kwargs.get('mechanism', None) self.path_on_compute = kwargs.get('path_on_compute', None) class DatasetLineage(msrest.serialization.Model): """DatasetLineage. :ivar identifier: :vartype identifier: ~flow.models.DatasetIdentifier :ivar consumption_type: Possible values include: "RunInput", "Reference". :vartype consumption_type: str or ~flow.models.DatasetConsumptionType :ivar input_details: :vartype input_details: ~flow.models.DatasetInputDetails """ _attribute_map = { 'identifier': {'key': 'identifier', 'type': 'DatasetIdentifier'}, 'consumption_type': {'key': 'consumptionType', 'type': 'str'}, 'input_details': {'key': 'inputDetails', 'type': 'DatasetInputDetails'}, } def __init__( self, **kwargs ): """ :keyword identifier: :paramtype identifier: ~flow.models.DatasetIdentifier :keyword consumption_type: Possible values include: "RunInput", "Reference". :paramtype consumption_type: str or ~flow.models.DatasetConsumptionType :keyword input_details: :paramtype input_details: ~flow.models.DatasetInputDetails """ super(DatasetLineage, self).__init__(**kwargs) self.identifier = kwargs.get('identifier', None) self.consumption_type = kwargs.get('consumption_type', None) self.input_details = kwargs.get('input_details', None) class DatasetOutput(msrest.serialization.Model): """DatasetOutput. :ivar dataset_type: Possible values include: "File", "Tabular". 
:vartype dataset_type: str or ~flow.models.DatasetType :ivar dataset_registration: :vartype dataset_registration: ~flow.models.DatasetRegistration :ivar dataset_output_options: :vartype dataset_output_options: ~flow.models.DatasetOutputOptions """ _attribute_map = { 'dataset_type': {'key': 'datasetType', 'type': 'str'}, 'dataset_registration': {'key': 'datasetRegistration', 'type': 'DatasetRegistration'}, 'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'DatasetOutputOptions'}, } def __init__( self, **kwargs ): """ :keyword dataset_type: Possible values include: "File", "Tabular". :paramtype dataset_type: str or ~flow.models.DatasetType :keyword dataset_registration: :paramtype dataset_registration: ~flow.models.DatasetRegistration :keyword dataset_output_options: :paramtype dataset_output_options: ~flow.models.DatasetOutputOptions """ super(DatasetOutput, self).__init__(**kwargs) self.dataset_type = kwargs.get('dataset_type', None) self.dataset_registration = kwargs.get('dataset_registration', None) self.dataset_output_options = kwargs.get('dataset_output_options', None) class DatasetOutputDetails(msrest.serialization.Model): """DatasetOutputDetails. :ivar output_name: :vartype output_name: str """ _attribute_map = { 'output_name': {'key': 'outputName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword output_name: :paramtype output_name: str """ super(DatasetOutputDetails, self).__init__(**kwargs) self.output_name = kwargs.get('output_name', None) class DatasetOutputOptions(msrest.serialization.Model): """DatasetOutputOptions. 
:ivar source_globs: :vartype source_globs: ~flow.models.GlobsOptions :ivar path_on_datastore: :vartype path_on_datastore: str :ivar path_on_datastore_parameter_assignment: :vartype path_on_datastore_parameter_assignment: ~flow.models.ParameterAssignment """ _attribute_map = { 'source_globs': {'key': 'sourceGlobs', 'type': 'GlobsOptions'}, 'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'}, 'path_on_datastore_parameter_assignment': {'key': 'PathOnDatastoreParameterAssignment', 'type': 'ParameterAssignment'}, } def __init__( self, **kwargs ): """ :keyword source_globs: :paramtype source_globs: ~flow.models.GlobsOptions :keyword path_on_datastore: :paramtype path_on_datastore: str :keyword path_on_datastore_parameter_assignment: :paramtype path_on_datastore_parameter_assignment: ~flow.models.ParameterAssignment """ super(DatasetOutputOptions, self).__init__(**kwargs) self.source_globs = kwargs.get('source_globs', None) self.path_on_datastore = kwargs.get('path_on_datastore', None) self.path_on_datastore_parameter_assignment = kwargs.get('path_on_datastore_parameter_assignment', None) class DataSetPathParameter(msrest.serialization.Model): """DataSetPathParameter. 
:ivar name: :vartype name: str :ivar documentation: :vartype documentation: str :ivar default_value: :vartype default_value: ~flow.models.DataSetDefinitionValue :ivar is_optional: :vartype is_optional: bool """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'documentation': {'key': 'documentation', 'type': 'str'}, 'default_value': {'key': 'defaultValue', 'type': 'DataSetDefinitionValue'}, 'is_optional': {'key': 'isOptional', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword documentation: :paramtype documentation: str :keyword default_value: :paramtype default_value: ~flow.models.DataSetDefinitionValue :keyword is_optional: :paramtype is_optional: bool """ super(DataSetPathParameter, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.documentation = kwargs.get('documentation', None) self.default_value = kwargs.get('default_value', None) self.is_optional = kwargs.get('is_optional', None) class DatasetRegistration(msrest.serialization.Model): """DatasetRegistration. :ivar name: :vartype name: str :ivar create_new_version: :vartype create_new_version: bool :ivar description: :vartype description: str :ivar tags: A set of tags. Dictionary of :code:`<string>`. :vartype tags: dict[str, str] :ivar additional_transformations: :vartype additional_transformations: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'create_new_version': {'key': 'createNewVersion', 'type': 'bool'}, 'description': {'key': 'description', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword create_new_version: :paramtype create_new_version: bool :keyword description: :paramtype description: str :keyword tags: A set of tags. Dictionary of :code:`<string>`. 
:paramtype tags: dict[str, str] :keyword additional_transformations: :paramtype additional_transformations: str """ super(DatasetRegistration, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.create_new_version = kwargs.get('create_new_version', None) self.description = kwargs.get('description', None) self.tags = kwargs.get('tags', None) self.additional_transformations = kwargs.get('additional_transformations', None) class DatasetRegistrationOptions(msrest.serialization.Model): """DatasetRegistrationOptions. :ivar additional_transformation: :vartype additional_transformation: str """ _attribute_map = { 'additional_transformation': {'key': 'additionalTransformation', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword additional_transformation: :paramtype additional_transformation: str """ super(DatasetRegistrationOptions, self).__init__(**kwargs) self.additional_transformation = kwargs.get('additional_transformation', None) class DataSettings(msrest.serialization.Model): """DataSettings. 
:ivar target_column_name: :vartype target_column_name: str :ivar weight_column_name: :vartype weight_column_name: str :ivar positive_label: :vartype positive_label: str :ivar validation_data: :vartype validation_data: ~flow.models.ValidationDataSettings :ivar test_data: :vartype test_data: ~flow.models.TestDataSettings """ _attribute_map = { 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, 'positive_label': {'key': 'positiveLabel', 'type': 'str'}, 'validation_data': {'key': 'validationData', 'type': 'ValidationDataSettings'}, 'test_data': {'key': 'testData', 'type': 'TestDataSettings'}, } def __init__( self, **kwargs ): """ :keyword target_column_name: :paramtype target_column_name: str :keyword weight_column_name: :paramtype weight_column_name: str :keyword positive_label: :paramtype positive_label: str :keyword validation_data: :paramtype validation_data: ~flow.models.ValidationDataSettings :keyword test_data: :paramtype test_data: ~flow.models.TestDataSettings """ super(DataSettings, self).__init__(**kwargs) self.target_column_name = kwargs.get('target_column_name', None) self.weight_column_name = kwargs.get('weight_column_name', None) self.positive_label = kwargs.get('positive_label', None) self.validation_data = kwargs.get('validation_data', None) self.test_data = kwargs.get('test_data', None) class DatastoreSetting(msrest.serialization.Model): """DatastoreSetting. :ivar data_store_name: :vartype data_store_name: str """ _attribute_map = { 'data_store_name': {'key': 'dataStoreName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword data_store_name: :paramtype data_store_name: str """ super(DatastoreSetting, self).__init__(**kwargs) self.data_store_name = kwargs.get('data_store_name', None) class DataTransferCloudConfiguration(msrest.serialization.Model): """DataTransferCloudConfiguration. 
:ivar allow_overwrite: :vartype allow_overwrite: bool """ _attribute_map = { 'allow_overwrite': {'key': 'AllowOverwrite', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword allow_overwrite: :paramtype allow_overwrite: bool """ super(DataTransferCloudConfiguration, self).__init__(**kwargs) self.allow_overwrite = kwargs.get('allow_overwrite', None) class DataTransferSink(msrest.serialization.Model): """DataTransferSink. :ivar type: Possible values include: "DataBase", "FileSystem". :vartype type: str or ~flow.models.DataTransferStorageType :ivar file_system: :vartype file_system: ~flow.models.FileSystem :ivar database_sink: :vartype database_sink: ~flow.models.DatabaseSink """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'file_system': {'key': 'fileSystem', 'type': 'FileSystem'}, 'database_sink': {'key': 'databaseSink', 'type': 'DatabaseSink'}, } def __init__( self, **kwargs ): """ :keyword type: Possible values include: "DataBase", "FileSystem". :paramtype type: str or ~flow.models.DataTransferStorageType :keyword file_system: :paramtype file_system: ~flow.models.FileSystem :keyword database_sink: :paramtype database_sink: ~flow.models.DatabaseSink """ super(DataTransferSink, self).__init__(**kwargs) self.type = kwargs.get('type', None) self.file_system = kwargs.get('file_system', None) self.database_sink = kwargs.get('database_sink', None) class DataTransferSource(msrest.serialization.Model): """DataTransferSource. :ivar type: Possible values include: "DataBase", "FileSystem". 
:vartype type: str or ~flow.models.DataTransferStorageType :ivar file_system: :vartype file_system: ~flow.models.FileSystem :ivar database_source: :vartype database_source: ~flow.models.DatabaseSource """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'file_system': {'key': 'fileSystem', 'type': 'FileSystem'}, 'database_source': {'key': 'databaseSource', 'type': 'DatabaseSource'}, } def __init__( self, **kwargs ): """ :keyword type: Possible values include: "DataBase", "FileSystem". :paramtype type: str or ~flow.models.DataTransferStorageType :keyword file_system: :paramtype file_system: ~flow.models.FileSystem :keyword database_source: :paramtype database_source: ~flow.models.DatabaseSource """ super(DataTransferSource, self).__init__(**kwargs) self.type = kwargs.get('type', None) self.file_system = kwargs.get('file_system', None) self.database_source = kwargs.get('database_source', None) class DataTransferV2CloudSetting(msrest.serialization.Model): """DataTransferV2CloudSetting. :ivar task_type: Possible values include: "ImportData", "ExportData", "CopyData". :vartype task_type: str or ~flow.models.DataTransferTaskType :ivar compute_name: :vartype compute_name: str :ivar copy_data_task: :vartype copy_data_task: ~flow.models.CopyDataTask :ivar import_data_task: :vartype import_data_task: ~flow.models.ImportDataTask :ivar export_data_task: :vartype export_data_task: ~flow.models.ExportDataTask :ivar data_transfer_sources: This is a dictionary. :vartype data_transfer_sources: dict[str, ~flow.models.DataTransferSource] :ivar data_transfer_sinks: This is a dictionary. :vartype data_transfer_sinks: dict[str, ~flow.models.DataTransferSink] :ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict". 
:vartype data_copy_mode: str or ~flow.models.DataCopyMode """ _attribute_map = { 'task_type': {'key': 'taskType', 'type': 'str'}, 'compute_name': {'key': 'ComputeName', 'type': 'str'}, 'copy_data_task': {'key': 'CopyDataTask', 'type': 'CopyDataTask'}, 'import_data_task': {'key': 'ImportDataTask', 'type': 'ImportDataTask'}, 'export_data_task': {'key': 'ExportDataTask', 'type': 'ExportDataTask'}, 'data_transfer_sources': {'key': 'DataTransferSources', 'type': '{DataTransferSource}'}, 'data_transfer_sinks': {'key': 'DataTransferSinks', 'type': '{DataTransferSink}'}, 'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword task_type: Possible values include: "ImportData", "ExportData", "CopyData". :paramtype task_type: str or ~flow.models.DataTransferTaskType :keyword compute_name: :paramtype compute_name: str :keyword copy_data_task: :paramtype copy_data_task: ~flow.models.CopyDataTask :keyword import_data_task: :paramtype import_data_task: ~flow.models.ImportDataTask :keyword export_data_task: :paramtype export_data_task: ~flow.models.ExportDataTask :keyword data_transfer_sources: This is a dictionary. :paramtype data_transfer_sources: dict[str, ~flow.models.DataTransferSource] :keyword data_transfer_sinks: This is a dictionary. :paramtype data_transfer_sinks: dict[str, ~flow.models.DataTransferSink] :keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict". 
:paramtype data_copy_mode: str or ~flow.models.DataCopyMode """ super(DataTransferV2CloudSetting, self).__init__(**kwargs) self.task_type = kwargs.get('task_type', None) self.compute_name = kwargs.get('compute_name', None) self.copy_data_task = kwargs.get('copy_data_task', None) self.import_data_task = kwargs.get('import_data_task', None) self.export_data_task = kwargs.get('export_data_task', None) self.data_transfer_sources = kwargs.get('data_transfer_sources', None) self.data_transfer_sinks = kwargs.get('data_transfer_sinks', None) self.data_copy_mode = kwargs.get('data_copy_mode', None) class DataTypeCreationInfo(msrest.serialization.Model): """DataTypeCreationInfo. :ivar id: :vartype id: str :ivar name: :vartype name: str :ivar description: :vartype description: str :ivar is_directory: :vartype is_directory: bool :ivar file_extension: :vartype file_extension: str :ivar parent_data_type_ids: :vartype parent_data_type_ids: list[str] """ _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'is_directory': {'key': 'isDirectory', 'type': 'bool'}, 'file_extension': {'key': 'fileExtension', 'type': 'str'}, 'parent_data_type_ids': {'key': 'parentDataTypeIds', 'type': '[str]'}, } def __init__( self, **kwargs ): """ :keyword id: :paramtype id: str :keyword name: :paramtype name: str :keyword description: :paramtype description: str :keyword is_directory: :paramtype is_directory: bool :keyword file_extension: :paramtype file_extension: str :keyword parent_data_type_ids: :paramtype parent_data_type_ids: list[str] """ super(DataTypeCreationInfo, self).__init__(**kwargs) self.id = kwargs.get('id', None) self.name = kwargs.get('name', None) self.description = kwargs.get('description', None) self.is_directory = kwargs.get('is_directory', None) self.file_extension = kwargs.get('file_extension', None) self.parent_data_type_ids = kwargs.get('parent_data_type_ids', None) class 
DBFSReference(msrest.serialization.Model): """DBFSReference. :ivar relative_path: :vartype relative_path: str :ivar aml_data_store_name: :vartype aml_data_store_name: str """ _attribute_map = { 'relative_path': {'key': 'relativePath', 'type': 'str'}, 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword relative_path: :paramtype relative_path: str :keyword aml_data_store_name: :paramtype aml_data_store_name: str """ super(DBFSReference, self).__init__(**kwargs) self.relative_path = kwargs.get('relative_path', None) self.aml_data_store_name = kwargs.get('aml_data_store_name', None) class DbfsStorageInfoDto(msrest.serialization.Model): """DbfsStorageInfoDto. :ivar destination: :vartype destination: str """ _attribute_map = { 'destination': {'key': 'destination', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword destination: :paramtype destination: str """ super(DbfsStorageInfoDto, self).__init__(**kwargs) self.destination = kwargs.get('destination', None) class DebugInfoResponse(msrest.serialization.Model): """Internal debugging information not intended for external clients. :ivar type: The type. :vartype type: str :ivar message: The message. :vartype message: str :ivar stack_trace: The stack trace. :vartype stack_trace: str :ivar inner_exception: Internal debugging information not intended for external clients. :vartype inner_exception: ~flow.models.DebugInfoResponse :ivar data: This is a dictionary. :vartype data: dict[str, any] :ivar error_response: The error response. 
:vartype error_response: ~flow.models.ErrorResponse """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'stack_trace': {'key': 'stackTrace', 'type': 'str'}, 'inner_exception': {'key': 'innerException', 'type': 'DebugInfoResponse'}, 'data': {'key': 'data', 'type': '{object}'}, 'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'}, } def __init__( self, **kwargs ): """ :keyword type: The type. :paramtype type: str :keyword message: The message. :paramtype message: str :keyword stack_trace: The stack trace. :paramtype stack_trace: str :keyword inner_exception: Internal debugging information not intended for external clients. :paramtype inner_exception: ~flow.models.DebugInfoResponse :keyword data: This is a dictionary. :paramtype data: dict[str, any] :keyword error_response: The error response. :paramtype error_response: ~flow.models.ErrorResponse """ super(DebugInfoResponse, self).__init__(**kwargs) self.type = kwargs.get('type', None) self.message = kwargs.get('message', None) self.stack_trace = kwargs.get('stack_trace', None) self.inner_exception = kwargs.get('inner_exception', None) self.data = kwargs.get('data', None) self.error_response = kwargs.get('error_response', None) class DeployFlowRequest(msrest.serialization.Model): """DeployFlowRequest. :ivar source_resource_id: :vartype source_resource_id: str :ivar source_flow_run_id: :vartype source_flow_run_id: str :ivar source_flow_id: :vartype source_flow_id: str :ivar flow: :vartype flow: ~flow.models.Flow :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". 
:vartype flow_type: str or ~flow.models.FlowType :ivar flow_submit_run_settings: :vartype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings :ivar output_names_included_in_endpoint_response: :vartype output_names_included_in_endpoint_response: list[str] :ivar endpoint_name: :vartype endpoint_name: str :ivar endpoint_description: :vartype endpoint_description: str :ivar auth_mode: Possible values include: "AMLToken", "Key", "AADToken". :vartype auth_mode: str or ~flow.models.EndpointAuthMode :ivar identity: :vartype identity: ~flow.models.ManagedServiceIdentity :ivar endpoint_tags: This is a dictionary. :vartype endpoint_tags: dict[str, str] :ivar connection_overrides: :vartype connection_overrides: list[~flow.models.ConnectionOverrideSetting] :ivar use_workspace_connection: :vartype use_workspace_connection: bool :ivar deployment_name: :vartype deployment_name: str :ivar environment: :vartype environment: str :ivar environment_variables: This is a dictionary. :vartype environment_variables: dict[str, str] :ivar deployment_tags: This is a dictionary. 
:vartype deployment_tags: dict[str, str] :ivar app_insights_enabled: :vartype app_insights_enabled: bool :ivar enable_model_data_collector: :vartype enable_model_data_collector: bool :ivar skip_update_traffic_to_full: :vartype skip_update_traffic_to_full: bool :ivar enable_streaming_response: :vartype enable_streaming_response: bool :ivar use_flow_snapshot_to_deploy: :vartype use_flow_snapshot_to_deploy: bool :ivar instance_type: :vartype instance_type: str :ivar instance_count: :vartype instance_count: int :ivar auto_grant_connection_permission: :vartype auto_grant_connection_permission: bool """ _attribute_map = { 'source_resource_id': {'key': 'sourceResourceId', 'type': 'str'}, 'source_flow_run_id': {'key': 'sourceFlowRunId', 'type': 'str'}, 'source_flow_id': {'key': 'sourceFlowId', 'type': 'str'}, 'flow': {'key': 'flow', 'type': 'Flow'}, 'flow_type': {'key': 'flowType', 'type': 'str'}, 'flow_submit_run_settings': {'key': 'flowSubmitRunSettings', 'type': 'FlowSubmitRunSettings'}, 'output_names_included_in_endpoint_response': {'key': 'outputNamesIncludedInEndpointResponse', 'type': '[str]'}, 'endpoint_name': {'key': 'endpointName', 'type': 'str'}, 'endpoint_description': {'key': 'endpointDescription', 'type': 'str'}, 'auth_mode': {'key': 'authMode', 'type': 'str'}, 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, 'endpoint_tags': {'key': 'endpointTags', 'type': '{str}'}, 'connection_overrides': {'key': 'connectionOverrides', 'type': '[ConnectionOverrideSetting]'}, 'use_workspace_connection': {'key': 'useWorkspaceConnection', 'type': 'bool'}, 'deployment_name': {'key': 'deploymentName', 'type': 'str'}, 'environment': {'key': 'environment', 'type': 'str'}, 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, 'deployment_tags': {'key': 'deploymentTags', 'type': '{str}'}, 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, 'enable_model_data_collector': {'key': 'enableModelDataCollector', 'type': 'bool'}, 
'skip_update_traffic_to_full': {'key': 'skipUpdateTrafficToFull', 'type': 'bool'}, 'enable_streaming_response': {'key': 'enableStreamingResponse', 'type': 'bool'}, 'use_flow_snapshot_to_deploy': {'key': 'useFlowSnapshotToDeploy', 'type': 'bool'}, 'instance_type': {'key': 'instanceType', 'type': 'str'}, 'instance_count': {'key': 'instanceCount', 'type': 'int'}, 'auto_grant_connection_permission': {'key': 'autoGrantConnectionPermission', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword source_resource_id: :paramtype source_resource_id: str :keyword source_flow_run_id: :paramtype source_flow_run_id: str :keyword source_flow_id: :paramtype source_flow_id: str :keyword flow: :paramtype flow: ~flow.models.Flow :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :paramtype flow_type: str or ~flow.models.FlowType :keyword flow_submit_run_settings: :paramtype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings :keyword output_names_included_in_endpoint_response: :paramtype output_names_included_in_endpoint_response: list[str] :keyword endpoint_name: :paramtype endpoint_name: str :keyword endpoint_description: :paramtype endpoint_description: str :keyword auth_mode: Possible values include: "AMLToken", "Key", "AADToken". :paramtype auth_mode: str or ~flow.models.EndpointAuthMode :keyword identity: :paramtype identity: ~flow.models.ManagedServiceIdentity :keyword endpoint_tags: This is a dictionary. :paramtype endpoint_tags: dict[str, str] :keyword connection_overrides: :paramtype connection_overrides: list[~flow.models.ConnectionOverrideSetting] :keyword use_workspace_connection: :paramtype use_workspace_connection: bool :keyword deployment_name: :paramtype deployment_name: str :keyword environment: :paramtype environment: str :keyword environment_variables: This is a dictionary. :paramtype environment_variables: dict[str, str] :keyword deployment_tags: This is a dictionary. 
:paramtype deployment_tags: dict[str, str] :keyword app_insights_enabled: :paramtype app_insights_enabled: bool :keyword enable_model_data_collector: :paramtype enable_model_data_collector: bool :keyword skip_update_traffic_to_full: :paramtype skip_update_traffic_to_full: bool :keyword enable_streaming_response: :paramtype enable_streaming_response: bool :keyword use_flow_snapshot_to_deploy: :paramtype use_flow_snapshot_to_deploy: bool :keyword instance_type: :paramtype instance_type: str :keyword instance_count: :paramtype instance_count: int :keyword auto_grant_connection_permission: :paramtype auto_grant_connection_permission: bool """ super(DeployFlowRequest, self).__init__(**kwargs) self.source_resource_id = kwargs.get('source_resource_id', None) self.source_flow_run_id = kwargs.get('source_flow_run_id', None) self.source_flow_id = kwargs.get('source_flow_id', None) self.flow = kwargs.get('flow', None) self.flow_type = kwargs.get('flow_type', None) self.flow_submit_run_settings = kwargs.get('flow_submit_run_settings', None) self.output_names_included_in_endpoint_response = kwargs.get('output_names_included_in_endpoint_response', None) self.endpoint_name = kwargs.get('endpoint_name', None) self.endpoint_description = kwargs.get('endpoint_description', None) self.auth_mode = kwargs.get('auth_mode', None) self.identity = kwargs.get('identity', None) self.endpoint_tags = kwargs.get('endpoint_tags', None) self.connection_overrides = kwargs.get('connection_overrides', None) self.use_workspace_connection = kwargs.get('use_workspace_connection', None) self.deployment_name = kwargs.get('deployment_name', None) self.environment = kwargs.get('environment', None) self.environment_variables = kwargs.get('environment_variables', None) self.deployment_tags = kwargs.get('deployment_tags', None) self.app_insights_enabled = kwargs.get('app_insights_enabled', None) self.enable_model_data_collector = kwargs.get('enable_model_data_collector', None) self.skip_update_traffic_to_full 
= kwargs.get('skip_update_traffic_to_full', None)
        self.enable_streaming_response = kwargs.get('enable_streaming_response', None)
        self.use_flow_snapshot_to_deploy = kwargs.get('use_flow_snapshot_to_deploy', None)
        self.instance_type = kwargs.get('instance_type', None)
        self.instance_count = kwargs.get('instance_count', None)
        self.auto_grant_connection_permission = kwargs.get('auto_grant_connection_permission', None)


class DeploymentInfo(msrest.serialization.Model):
    """DeploymentInfo.

    :ivar operation_id:
    :vartype operation_id: str
    :ivar service_id:
    :vartype service_id: str
    :ivar service_name:
    :vartype service_name: str
    :ivar status_detail:
    :vartype status_detail: str
    """

    # msrest serialization map: python attribute -> {wire key, msrest type}.
    _attribute_map = {
        'operation_id': {'key': 'operationId', 'type': 'str'},
        'service_id': {'key': 'serviceId', 'type': 'str'},
        'service_name': {'key': 'serviceName', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword operation_id:
        :paramtype operation_id: str
        :keyword service_id:
        :paramtype service_id: str
        :keyword service_name:
        :paramtype service_name: str
        :keyword status_detail:
        :paramtype status_detail: str
        """
        super(DeploymentInfo, self).__init__(**kwargs)
        # Every field is an optional keyword argument defaulting to None.
        self.operation_id = kwargs.get('operation_id', None)
        self.service_id = kwargs.get('service_id', None)
        self.service_name = kwargs.get('service_name', None)
        self.status_detail = kwargs.get('status_detail', None)


class DistributionConfiguration(msrest.serialization.Model):
    """DistributionConfiguration.

    :ivar distribution_type: Possible values include: "PyTorch", "TensorFlow", "Mpi", "Ray".
    :vartype distribution_type: str or ~flow.models.DistributionType
    """

    _attribute_map = {
        'distribution_type': {'key': 'distributionType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword distribution_type: Possible values include: "PyTorch", "TensorFlow", "Mpi", "Ray".
        :paramtype distribution_type: str or ~flow.models.DistributionType
        """
        super(DistributionConfiguration, self).__init__(**kwargs)
        self.distribution_type = kwargs.get('distribution_type', None)


class DistributionParameter(msrest.serialization.Model):
    """DistributionParameter.

    :ivar name:
    :vartype name: str
    :ivar label:
    :vartype label: str
    :ivar description:
    :vartype description: str
    :ivar input_type: Possible values include: "Text", "Number".
    :vartype input_type: str or ~flow.models.DistributionParameterEnum
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'input_type': {'key': 'inputType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword label:
        :paramtype label: str
        :keyword description:
        :paramtype description: str
        :keyword input_type: Possible values include: "Text", "Number".
        :paramtype input_type: str or ~flow.models.DistributionParameterEnum
        """
        super(DistributionParameter, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.label = kwargs.get('label', None)
        self.description = kwargs.get('description', None)
        self.input_type = kwargs.get('input_type', None)


class DockerBuildContext(msrest.serialization.Model):
    """DockerBuildContext.

    :ivar location_type: Possible values include: "Git", "StorageAccount".
    :vartype location_type: str or ~flow.models.BuildContextLocationType
    :ivar location:
    :vartype location: str
    :ivar dockerfile_path:
    :vartype dockerfile_path: str
    """

    _attribute_map = {
        'location_type': {'key': 'locationType', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'dockerfile_path': {'key': 'dockerfilePath', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword location_type: Possible values include: "Git", "StorageAccount".
        :paramtype location_type: str or ~flow.models.BuildContextLocationType
        :keyword location:
        :paramtype location: str
        :keyword dockerfile_path:
        :paramtype dockerfile_path: str
        """
        super(DockerBuildContext, self).__init__(**kwargs)
        self.location_type = kwargs.get('location_type', None)
        self.location = kwargs.get('location', None)
        # NOTE(review): unlike every sibling field, this one defaults to
        # "Dockerfile" instead of None, and the docstring does not mention the
        # default — presumably intentional (swagger default); verify on regen.
        self.dockerfile_path = kwargs.get('dockerfile_path', "Dockerfile")


class DockerConfiguration(msrest.serialization.Model):
    """DockerConfiguration.

    :ivar use_docker:
    :vartype use_docker: bool
    :ivar shared_volumes:
    :vartype shared_volumes: bool
    :ivar arguments:
    :vartype arguments: list[str]
    """

    _attribute_map = {
        'use_docker': {'key': 'useDocker', 'type': 'bool'},
        'shared_volumes': {'key': 'sharedVolumes', 'type': 'bool'},
        'arguments': {'key': 'arguments', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword use_docker:
        :paramtype use_docker: bool
        :keyword shared_volumes:
        :paramtype shared_volumes: bool
        :keyword arguments:
        :paramtype arguments: list[str]
        """
        super(DockerConfiguration, self).__init__(**kwargs)
        self.use_docker = kwargs.get('use_docker', None)
        self.shared_volumes = kwargs.get('shared_volumes', None)
        self.arguments = kwargs.get('arguments', None)


class DockerImagePlatform(msrest.serialization.Model):
    """DockerImagePlatform.

    :ivar os:
    :vartype os: str
    :ivar architecture:
    :vartype architecture: str
    """

    _attribute_map = {
        'os': {'key': 'os', 'type': 'str'},
        'architecture': {'key': 'architecture', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword os:
        :paramtype os: str
        :keyword architecture:
        :paramtype architecture: str
        """
        super(DockerImagePlatform, self).__init__(**kwargs)
        self.os = kwargs.get('os', None)
        self.architecture = kwargs.get('architecture', None)


class DockerSection(msrest.serialization.Model):
    """DockerSection.
    :ivar base_image:
    :vartype base_image: str
    :ivar platform:
    :vartype platform: ~flow.models.DockerImagePlatform
    :ivar base_dockerfile:
    :vartype base_dockerfile: str
    :ivar build_context:
    :vartype build_context: ~flow.models.DockerBuildContext
    :ivar base_image_registry:
    :vartype base_image_registry: ~flow.models.ContainerRegistry
    """

    _attribute_map = {
        'base_image': {'key': 'baseImage', 'type': 'str'},
        'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
        'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
        'build_context': {'key': 'buildContext', 'type': 'DockerBuildContext'},
        'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistry'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword base_image:
        :paramtype base_image: str
        :keyword platform:
        :paramtype platform: ~flow.models.DockerImagePlatform
        :keyword base_dockerfile:
        :paramtype base_dockerfile: str
        :keyword build_context:
        :paramtype build_context: ~flow.models.DockerBuildContext
        :keyword base_image_registry:
        :paramtype base_image_registry: ~flow.models.ContainerRegistry
        """
        super(DockerSection, self).__init__(**kwargs)
        self.base_image = kwargs.get('base_image', None)
        self.platform = kwargs.get('platform', None)
        self.base_dockerfile = kwargs.get('base_dockerfile', None)
        self.build_context = kwargs.get('build_context', None)
        self.base_image_registry = kwargs.get('base_image_registry', None)


class DockerSettingConfiguration(msrest.serialization.Model):
    """DockerSettingConfiguration.

    :ivar use_docker:
    :vartype use_docker: bool
    :ivar shared_volumes:
    :vartype shared_volumes: bool
    :ivar shm_size:
    :vartype shm_size: str
    :ivar arguments:
    :vartype arguments: list[str]
    """

    _attribute_map = {
        'use_docker': {'key': 'useDocker', 'type': 'bool'},
        'shared_volumes': {'key': 'sharedVolumes', 'type': 'bool'},
        'shm_size': {'key': 'shmSize', 'type': 'str'},
        'arguments': {'key': 'arguments', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword use_docker:
        :paramtype use_docker: bool
        :keyword shared_volumes:
        :paramtype shared_volumes: bool
        :keyword shm_size:
        :paramtype shm_size: str
        :keyword arguments:
        :paramtype arguments: list[str]
        """
        super(DockerSettingConfiguration, self).__init__(**kwargs)
        self.use_docker = kwargs.get('use_docker', None)
        self.shared_volumes = kwargs.get('shared_volumes', None)
        self.shm_size = kwargs.get('shm_size', None)
        self.arguments = kwargs.get('arguments', None)


# NOTE(review): the "<componentsΒ·1sqg750Β·...>" text in the docstrings below is
# the OpenAPI schema title carried over by the generator and appears
# mojibake-encoded; left byte-identical so it round-trips on regeneration.
class DoWhileControlFlowInfo(msrest.serialization.Model):
    """DoWhileControlFlowInfo.

    :ivar output_port_name_to_input_port_names_mapping: Dictionary of
     <componentsΒ·1sqg750Β·schemasΒ·dowhilecontrolflowinfoΒ·propertiesΒ·outputportnametoinputportnamesmappingΒ·additionalproperties>.
    :vartype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
    :ivar condition_output_port_name:
    :vartype condition_output_port_name: str
    :ivar run_settings:
    :vartype run_settings: ~flow.models.DoWhileControlFlowRunSettings
    """

    _attribute_map = {
        'output_port_name_to_input_port_names_mapping': {'key': 'outputPortNameToInputPortNamesMapping', 'type': '{[str]}'},
        'condition_output_port_name': {'key': 'conditionOutputPortName', 'type': 'str'},
        'run_settings': {'key': 'runSettings', 'type': 'DoWhileControlFlowRunSettings'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword output_port_name_to_input_port_names_mapping: Dictionary of
         <componentsΒ·1sqg750Β·schemasΒ·dowhilecontrolflowinfoΒ·propertiesΒ·outputportnametoinputportnamesmappingΒ·additionalproperties>.
        :paramtype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
        :keyword condition_output_port_name:
        :paramtype condition_output_port_name: str
        :keyword run_settings:
        :paramtype run_settings: ~flow.models.DoWhileControlFlowRunSettings
        """
        super(DoWhileControlFlowInfo, self).__init__(**kwargs)
        self.output_port_name_to_input_port_names_mapping = kwargs.get('output_port_name_to_input_port_names_mapping', None)
        self.condition_output_port_name = kwargs.get('condition_output_port_name', None)
        self.run_settings = kwargs.get('run_settings', None)


class DoWhileControlFlowRunSettings(msrest.serialization.Model):
    """DoWhileControlFlowRunSettings.

    :ivar max_loop_iteration_count:
    :vartype max_loop_iteration_count: ~flow.models.ParameterAssignment
    """

    _attribute_map = {
        'max_loop_iteration_count': {'key': 'maxLoopIterationCount', 'type': 'ParameterAssignment'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword max_loop_iteration_count:
        :paramtype max_loop_iteration_count: ~flow.models.ParameterAssignment
        """
        super(DoWhileControlFlowRunSettings, self).__init__(**kwargs)
        self.max_loop_iteration_count = kwargs.get('max_loop_iteration_count', None)


class DownloadResourceInfo(msrest.serialization.Model):
    """DownloadResourceInfo.

    :ivar download_url:
    :vartype download_url: str
    :ivar size:
    :vartype size: long
    """

    _attribute_map = {
        'download_url': {'key': 'downloadUrl', 'type': 'str'},
        # 'long' is msrest's 64-bit integer wire type (docstring: long).
        'size': {'key': 'size', 'type': 'long'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword download_url:
        :paramtype download_url: str
        :keyword size:
        :paramtype size: long
        """
        super(DownloadResourceInfo, self).__init__(**kwargs)
        self.download_url = kwargs.get('download_url', None)
        self.size = kwargs.get('size', None)


class EndpointSetting(msrest.serialization.Model):
    """EndpointSetting.
    :ivar type:
    :vartype type: str
    :ivar port:
    :vartype port: int
    :ivar ssl_thumbprint:
    :vartype ssl_thumbprint: str
    :ivar endpoint:
    :vartype endpoint: str
    :ivar proxy_endpoint:
    :vartype proxy_endpoint: str
    :ivar status:
    :vartype status: str
    :ivar error_message:
    :vartype error_message: str
    :ivar enabled:
    :vartype enabled: bool
    :ivar properties: Dictionary of :code:`<string>`.
    :vartype properties: dict[str, str]
    :ivar nodes:
    :vartype nodes: str
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'port': {'key': 'port', 'type': 'int'},
        'ssl_thumbprint': {'key': 'sslThumbprint', 'type': 'str'},
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'proxy_endpoint': {'key': 'proxyEndpoint', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'error_message': {'key': 'errorMessage', 'type': 'str'},
        'enabled': {'key': 'enabled', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'nodes': {'key': 'nodes', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword type:
        :paramtype type: str
        :keyword port:
        :paramtype port: int
        :keyword ssl_thumbprint:
        :paramtype ssl_thumbprint: str
        :keyword endpoint:
        :paramtype endpoint: str
        :keyword proxy_endpoint:
        :paramtype proxy_endpoint: str
        :keyword status:
        :paramtype status: str
        :keyword error_message:
        :paramtype error_message: str
        :keyword enabled:
        :paramtype enabled: bool
        :keyword properties: Dictionary of :code:`<string>`.
        :paramtype properties: dict[str, str]
        :keyword nodes:
        :paramtype nodes: str
        """
        super(EndpointSetting, self).__init__(**kwargs)
        self.type = kwargs.get('type', None)
        self.port = kwargs.get('port', None)
        self.ssl_thumbprint = kwargs.get('ssl_thumbprint', None)
        self.endpoint = kwargs.get('endpoint', None)
        self.proxy_endpoint = kwargs.get('proxy_endpoint', None)
        self.status = kwargs.get('status', None)
        self.error_message = kwargs.get('error_message', None)
        self.enabled = kwargs.get('enabled', None)
        self.properties = kwargs.get('properties', None)
        self.nodes = kwargs.get('nodes', None)


class EntityInterface(msrest.serialization.Model):
    """EntityInterface.

    :ivar parameters:
    :vartype parameters: list[~flow.models.Parameter]
    :ivar ports:
    :vartype ports: ~flow.models.NodePortInterface
    :ivar metadata_parameters:
    :vartype metadata_parameters: list[~flow.models.Parameter]
    :ivar data_path_parameters:
    :vartype data_path_parameters: list[~flow.models.DataPathParameter]
    :ivar data_path_parameter_list:
    :vartype data_path_parameter_list: list[~flow.models.DataSetPathParameter]
    :ivar asset_output_settings_parameter_list:
    :vartype asset_output_settings_parameter_list: list[~flow.models.AssetOutputSettingsParameter]
    """

    _attribute_map = {
        'parameters': {'key': 'parameters', 'type': '[Parameter]'},
        'ports': {'key': 'ports', 'type': 'NodePortInterface'},
        'metadata_parameters': {'key': 'metadataParameters', 'type': '[Parameter]'},
        'data_path_parameters': {'key': 'dataPathParameters', 'type': '[DataPathParameter]'},
        'data_path_parameter_list': {'key': 'dataPathParameterList', 'type': '[DataSetPathParameter]'},
        # NOTE(review): PascalCase wire key, unlike the camelCase keys above —
        # looks carried over from the swagger as-is; verify before "fixing".
        'asset_output_settings_parameter_list': {'key': 'AssetOutputSettingsParameterList', 'type': '[AssetOutputSettingsParameter]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword parameters:
        :paramtype parameters: list[~flow.models.Parameter]
        :keyword ports:
        :paramtype ports: ~flow.models.NodePortInterface
        :keyword metadata_parameters:
        :paramtype metadata_parameters: list[~flow.models.Parameter]
        :keyword data_path_parameters:
        :paramtype data_path_parameters: list[~flow.models.DataPathParameter]
        :keyword data_path_parameter_list:
        :paramtype data_path_parameter_list: list[~flow.models.DataSetPathParameter]
        :keyword asset_output_settings_parameter_list:
        :paramtype asset_output_settings_parameter_list: list[~flow.models.AssetOutputSettingsParameter]
        """
        super(EntityInterface, self).__init__(**kwargs)
        self.parameters = kwargs.get('parameters', None)
        self.ports = kwargs.get('ports', None)
        self.metadata_parameters = kwargs.get('metadata_parameters', None)
        self.data_path_parameters = kwargs.get('data_path_parameters', None)
        self.data_path_parameter_list = kwargs.get('data_path_parameter_list', None)
        self.asset_output_settings_parameter_list = kwargs.get('asset_output_settings_parameter_list', None)


class EntrySetting(msrest.serialization.Model):
    """EntrySetting.

    :ivar file:
    :vartype file: str
    :ivar class_name:
    :vartype class_name: str
    """

    _attribute_map = {
        'file': {'key': 'file', 'type': 'str'},
        'class_name': {'key': 'className', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword file:
        :paramtype file: str
        :keyword class_name:
        :paramtype class_name: str
        """
        super(EntrySetting, self).__init__(**kwargs)
        self.file = kwargs.get('file', None)
        self.class_name = kwargs.get('class_name', None)


class EnumParameterRule(msrest.serialization.Model):
    """EnumParameterRule.

    :ivar valid_values:
    :vartype valid_values: list[str]
    """

    _attribute_map = {
        'valid_values': {'key': 'validValues', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword valid_values:
        :paramtype valid_values: list[str]
        """
        super(EnumParameterRule, self).__init__(**kwargs)
        self.valid_values = kwargs.get('valid_values', None)


class EnvironmentConfiguration(msrest.serialization.Model):
    """EnvironmentConfiguration.
    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    :ivar use_environment_definition:
    :vartype use_environment_definition: bool
    :ivar environment_definition_string:
    :vartype environment_definition_string: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'use_environment_definition': {'key': 'useEnvironmentDefinition', 'type': 'bool'},
        'environment_definition_string': {'key': 'environmentDefinitionString', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        :keyword use_environment_definition:
        :paramtype use_environment_definition: bool
        :keyword environment_definition_string:
        :paramtype environment_definition_string: str
        """
        super(EnvironmentConfiguration, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.version = kwargs.get('version', None)
        self.use_environment_definition = kwargs.get('use_environment_definition', None)
        self.environment_definition_string = kwargs.get('environment_definition_string', None)


class EnvironmentDefinition(msrest.serialization.Model):
    """EnvironmentDefinition.

    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    :ivar asset_id:
    :vartype asset_id: str
    :ivar auto_rebuild:
    :vartype auto_rebuild: bool
    :ivar python:
    :vartype python: ~flow.models.PythonSection
    :ivar environment_variables: Dictionary of :code:`<string>`.
    :vartype environment_variables: dict[str, str]
    :ivar docker:
    :vartype docker: ~flow.models.DockerSection
    :ivar spark:
    :vartype spark: ~flow.models.SparkSection
    :ivar r:
    :vartype r: ~flow.models.RSection
    :ivar inferencing_stack_version:
    :vartype inferencing_stack_version: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'asset_id': {'key': 'assetId', 'type': 'str'},
        'auto_rebuild': {'key': 'autoRebuild', 'type': 'bool'},
        'python': {'key': 'python', 'type': 'PythonSection'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'docker': {'key': 'docker', 'type': 'DockerSection'},
        'spark': {'key': 'spark', 'type': 'SparkSection'},
        'r': {'key': 'r', 'type': 'RSection'},
        'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        :keyword asset_id:
        :paramtype asset_id: str
        :keyword auto_rebuild:
        :paramtype auto_rebuild: bool
        :keyword python:
        :paramtype python: ~flow.models.PythonSection
        :keyword environment_variables: Dictionary of :code:`<string>`.
        :paramtype environment_variables: dict[str, str]
        :keyword docker:
        :paramtype docker: ~flow.models.DockerSection
        :keyword spark:
        :paramtype spark: ~flow.models.SparkSection
        :keyword r:
        :paramtype r: ~flow.models.RSection
        :keyword inferencing_stack_version:
        :paramtype inferencing_stack_version: str
        """
        super(EnvironmentDefinition, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.version = kwargs.get('version', None)
        self.asset_id = kwargs.get('asset_id', None)
        self.auto_rebuild = kwargs.get('auto_rebuild', None)
        self.python = kwargs.get('python', None)
        self.environment_variables = kwargs.get('environment_variables', None)
        self.docker = kwargs.get('docker', None)
        self.spark = kwargs.get('spark', None)
        self.r = kwargs.get('r', None)
        self.inferencing_stack_version = kwargs.get('inferencing_stack_version', None)


class EnvironmentDefinitionDto(msrest.serialization.Model):
    """EnvironmentDefinitionDto.

    :ivar environment_name:
    :vartype environment_name: str
    :ivar environment_version:
    :vartype environment_version: str
    :ivar intellectual_property_publisher:
    :vartype intellectual_property_publisher: str
    """

    _attribute_map = {
        'environment_name': {'key': 'environmentName', 'type': 'str'},
        'environment_version': {'key': 'environmentVersion', 'type': 'str'},
        'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword environment_name:
        :paramtype environment_name: str
        :keyword environment_version:
        :paramtype environment_version: str
        :keyword intellectual_property_publisher:
        :paramtype intellectual_property_publisher: str
        """
        super(EnvironmentDefinitionDto, self).__init__(**kwargs)
        self.environment_name = kwargs.get('environment_name', None)
        self.environment_version = kwargs.get('environment_version', None)
        self.intellectual_property_publisher = kwargs.get('intellectual_property_publisher', None)


class EPRPipelineRunErrorClassificationRequest(msrest.serialization.Model):
"""EPRPipelineRunErrorClassificationRequest. :ivar root_run_id: :vartype root_run_id: str :ivar run_id: :vartype run_id: str :ivar task_result: :vartype task_result: str :ivar failure_type: :vartype failure_type: str :ivar failure_name: :vartype failure_name: str :ivar responsible_team: :vartype responsible_team: str """ _attribute_map = { 'root_run_id': {'key': 'rootRunId', 'type': 'str'}, 'run_id': {'key': 'runId', 'type': 'str'}, 'task_result': {'key': 'taskResult', 'type': 'str'}, 'failure_type': {'key': 'failureType', 'type': 'str'}, 'failure_name': {'key': 'failureName', 'type': 'str'}, 'responsible_team': {'key': 'responsibleTeam', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword root_run_id: :paramtype root_run_id: str :keyword run_id: :paramtype run_id: str :keyword task_result: :paramtype task_result: str :keyword failure_type: :paramtype failure_type: str :keyword failure_name: :paramtype failure_name: str :keyword responsible_team: :paramtype responsible_team: str """ super(EPRPipelineRunErrorClassificationRequest, self).__init__(**kwargs) self.root_run_id = kwargs.get('root_run_id', None) self.run_id = kwargs.get('run_id', None) self.task_result = kwargs.get('task_result', None) self.failure_type = kwargs.get('failure_type', None) self.failure_name = kwargs.get('failure_name', None) self.responsible_team = kwargs.get('responsible_team', None) class ErrorAdditionalInfo(msrest.serialization.Model): """The resource management error additional info. :ivar type: The additional info type. :vartype type: str :ivar info: The additional info. :vartype info: any """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'info': {'key': 'info', 'type': 'object'}, } def __init__( self, **kwargs ): """ :keyword type: The additional info type. :paramtype type: str :keyword info: The additional info. 
:paramtype info: any """ super(ErrorAdditionalInfo, self).__init__(**kwargs) self.type = kwargs.get('type', None) self.info = kwargs.get('info', None) class ErrorResponse(msrest.serialization.Model): """The error response. :ivar error: The root error. :vartype error: ~flow.models.RootError :ivar correlation: Dictionary containing correlation details for the error. :vartype correlation: dict[str, str] :ivar environment: The hosting environment. :vartype environment: str :ivar location: The Azure region. :vartype location: str :ivar time: The time in UTC. :vartype time: ~datetime.datetime :ivar component_name: Component name where error originated/encountered. :vartype component_name: str """ _attribute_map = { 'error': {'key': 'error', 'type': 'RootError'}, 'correlation': {'key': 'correlation', 'type': '{str}'}, 'environment': {'key': 'environment', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'time': {'key': 'time', 'type': 'iso-8601'}, 'component_name': {'key': 'componentName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword error: The root error. :paramtype error: ~flow.models.RootError :keyword correlation: Dictionary containing correlation details for the error. :paramtype correlation: dict[str, str] :keyword environment: The hosting environment. :paramtype environment: str :keyword location: The Azure region. :paramtype location: str :keyword time: The time in UTC. :paramtype time: ~datetime.datetime :keyword component_name: Component name where error originated/encountered. :paramtype component_name: str """ super(ErrorResponse, self).__init__(**kwargs) self.error = kwargs.get('error', None) self.correlation = kwargs.get('correlation', None) self.environment = kwargs.get('environment', None) self.location = kwargs.get('location', None) self.time = kwargs.get('time', None) self.component_name = kwargs.get('component_name', None) class EsCloudConfiguration(msrest.serialization.Model): """EsCloudConfiguration. 
    :ivar enable_output_to_file_based_on_data_type_id:
    :vartype enable_output_to_file_based_on_data_type_id: bool
    :ivar environment:
    :vartype environment: ~flow.models.EnvironmentConfiguration
    :ivar hyper_drive_configuration:
    :vartype hyper_drive_configuration: ~flow.models.HyperDriveConfiguration
    :ivar k8_s_config:
    :vartype k8_s_config: ~flow.models.K8SConfiguration
    :ivar resource_config:
    :vartype resource_config: ~flow.models.AEVAResourceConfiguration
    :ivar torch_distributed_config:
    :vartype torch_distributed_config: ~flow.models.TorchDistributedConfiguration
    :ivar target_selector_config:
    :vartype target_selector_config: ~flow.models.TargetSelectorConfiguration
    :ivar docker_config:
    :vartype docker_config: ~flow.models.DockerSettingConfiguration
    :ivar environment_variables: Dictionary of :code:`<string>`.
    :vartype environment_variables: dict[str, str]
    :ivar max_run_duration_seconds:
    :vartype max_run_duration_seconds: int
    :ivar identity:
    :vartype identity: ~flow.models.IdentitySetting
    :ivar application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
    :vartype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
    :ivar run_config:
    :vartype run_config: str
    """

    _attribute_map = {
        'enable_output_to_file_based_on_data_type_id': {'key': 'enableOutputToFileBasedOnDataTypeId', 'type': 'bool'},
        'environment': {'key': 'environment', 'type': 'EnvironmentConfiguration'},
        'hyper_drive_configuration': {'key': 'hyperDriveConfiguration', 'type': 'HyperDriveConfiguration'},
        # Note the attribute-name/wire-key spelling difference: k8_s_config -> k8sConfig.
        'k8_s_config': {'key': 'k8sConfig', 'type': 'K8SConfiguration'},
        'resource_config': {'key': 'resourceConfig', 'type': 'AEVAResourceConfiguration'},
        'torch_distributed_config': {'key': 'torchDistributedConfig', 'type': 'TorchDistributedConfiguration'},
        'target_selector_config': {'key': 'targetSelectorConfig', 'type': 'TargetSelectorConfiguration'},
        'docker_config': {'key': 'dockerConfig', 'type': 'DockerSettingConfiguration'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'int'},
        'identity': {'key': 'identity', 'type': 'IdentitySetting'},
        'application_endpoints': {'key': 'applicationEndpoints', 'type': '{ApplicationEndpointConfiguration}'},
        'run_config': {'key': 'runConfig', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword enable_output_to_file_based_on_data_type_id:
        :paramtype enable_output_to_file_based_on_data_type_id: bool
        :keyword environment:
        :paramtype environment: ~flow.models.EnvironmentConfiguration
        :keyword hyper_drive_configuration:
        :paramtype hyper_drive_configuration: ~flow.models.HyperDriveConfiguration
        :keyword k8_s_config:
        :paramtype k8_s_config: ~flow.models.K8SConfiguration
        :keyword resource_config:
        :paramtype resource_config: ~flow.models.AEVAResourceConfiguration
        :keyword torch_distributed_config:
        :paramtype torch_distributed_config: ~flow.models.TorchDistributedConfiguration
        :keyword target_selector_config:
        :paramtype target_selector_config: ~flow.models.TargetSelectorConfiguration
        :keyword docker_config:
        :paramtype docker_config: ~flow.models.DockerSettingConfiguration
        :keyword environment_variables: Dictionary of :code:`<string>`.
        :paramtype environment_variables: dict[str, str]
        :keyword max_run_duration_seconds:
        :paramtype max_run_duration_seconds: int
        :keyword identity:
        :paramtype identity: ~flow.models.IdentitySetting
        :keyword application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
        :paramtype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
        :keyword run_config:
        :paramtype run_config: str
        """
        super(EsCloudConfiguration, self).__init__(**kwargs)
        self.enable_output_to_file_based_on_data_type_id = kwargs.get('enable_output_to_file_based_on_data_type_id', None)
        self.environment = kwargs.get('environment', None)
        self.hyper_drive_configuration = kwargs.get('hyper_drive_configuration', None)
        self.k8_s_config = kwargs.get('k8_s_config', None)
        self.resource_config = kwargs.get('resource_config', None)
        self.torch_distributed_config = kwargs.get('torch_distributed_config', None)
        self.target_selector_config = kwargs.get('target_selector_config', None)
        self.docker_config = kwargs.get('docker_config', None)
        self.environment_variables = kwargs.get('environment_variables', None)
        self.max_run_duration_seconds = kwargs.get('max_run_duration_seconds', None)
        self.identity = kwargs.get('identity', None)
        self.application_endpoints = kwargs.get('application_endpoints', None)
        self.run_config = kwargs.get('run_config', None)


class EvaluationFlowRunSettings(msrest.serialization.Model):
    """EvaluationFlowRunSettings.

    :ivar flow_run_id:
    :vartype flow_run_id: str
    :ivar flow_run_display_name:
    :vartype flow_run_display_name: str
    :ivar batch_data_input:
    :vartype batch_data_input: ~flow.models.BatchDataInput
    :ivar inputs_mapping: This is a dictionary.
    :vartype inputs_mapping: dict[str, str]
    :ivar data_inputs: This is a dictionary.
:vartype data_inputs: dict[str, str] :ivar connection_overrides: :vartype connection_overrides: list[~flow.models.ConnectionOverrideSetting] :ivar runtime_name: :vartype runtime_name: str :ivar aml_compute_name: :vartype aml_compute_name: str :ivar properties: This is a dictionary. :vartype properties: dict[str, str] """ _attribute_map = { 'flow_run_id': {'key': 'flowRunId', 'type': 'str'}, 'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'}, 'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'}, 'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'}, 'data_inputs': {'key': 'dataInputs', 'type': '{str}'}, 'connection_overrides': {'key': 'connectionOverrides', 'type': '[ConnectionOverrideSetting]'}, 'runtime_name': {'key': 'runtimeName', 'type': 'str'}, 'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, } def __init__( self, **kwargs ): """ :keyword flow_run_id: :paramtype flow_run_id: str :keyword flow_run_display_name: :paramtype flow_run_display_name: str :keyword batch_data_input: :paramtype batch_data_input: ~flow.models.BatchDataInput :keyword inputs_mapping: This is a dictionary. :paramtype inputs_mapping: dict[str, str] :keyword data_inputs: This is a dictionary. :paramtype data_inputs: dict[str, str] :keyword connection_overrides: :paramtype connection_overrides: list[~flow.models.ConnectionOverrideSetting] :keyword runtime_name: :paramtype runtime_name: str :keyword aml_compute_name: :paramtype aml_compute_name: str :keyword properties: This is a dictionary. 
:paramtype properties: dict[str, str] """ super(EvaluationFlowRunSettings, self).__init__(**kwargs) self.flow_run_id = kwargs.get('flow_run_id', None) self.flow_run_display_name = kwargs.get('flow_run_display_name', None) self.batch_data_input = kwargs.get('batch_data_input', None) self.inputs_mapping = kwargs.get('inputs_mapping', None) self.data_inputs = kwargs.get('data_inputs', None) self.connection_overrides = kwargs.get('connection_overrides', None) self.runtime_name = kwargs.get('runtime_name', None) self.aml_compute_name = kwargs.get('aml_compute_name', None) self.properties = kwargs.get('properties', None) class ExampleRequest(msrest.serialization.Model): """ExampleRequest. :ivar inputs: This is a dictionary. :vartype inputs: dict[str, list[list[any]]] :ivar global_parameters: This is a dictionary. :vartype global_parameters: dict[str, any] """ _attribute_map = { 'inputs': {'key': 'inputs', 'type': '{[[object]]}'}, 'global_parameters': {'key': 'globalParameters', 'type': '{object}'}, } def __init__( self, **kwargs ): """ :keyword inputs: This is a dictionary. :paramtype inputs: dict[str, list[list[any]]] :keyword global_parameters: This is a dictionary. :paramtype global_parameters: dict[str, any] """ super(ExampleRequest, self).__init__(**kwargs) self.inputs = kwargs.get('inputs', None) self.global_parameters = kwargs.get('global_parameters', None) class ExecutionContextDto(msrest.serialization.Model): """ExecutionContextDto. 
    :ivar executable:
    :vartype executable: str
    :ivar user_code:
    :vartype user_code: str
    :ivar arguments:
    :vartype arguments: str
    """

    # Maps Python attribute -> REST wire key and msrest type string.
    _attribute_map = {
        'executable': {'key': 'executable', 'type': 'str'},
        'user_code': {'key': 'userCode', 'type': 'str'},
        'arguments': {'key': 'arguments', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword executable:
        :paramtype executable: str
        :keyword user_code:
        :paramtype user_code: str
        :keyword arguments:
        :paramtype arguments: str
        """
        super(ExecutionContextDto, self).__init__(**kwargs)
        self.executable = kwargs.get('executable', None)
        self.user_code = kwargs.get('user_code', None)
        self.arguments = kwargs.get('arguments', None)


class ExecutionDataLocation(msrest.serialization.Model):
    """ExecutionDataLocation.

    :ivar dataset:
    :vartype dataset: ~flow.models.RunDatasetReference
    :ivar data_path:
    :vartype data_path: ~flow.models.ExecutionDataPath
    :ivar uri:
    :vartype uri: ~flow.models.UriReference
    :ivar type:
    :vartype type: str
    """

    _attribute_map = {
        'dataset': {'key': 'dataset', 'type': 'RunDatasetReference'},
        'data_path': {'key': 'dataPath', 'type': 'ExecutionDataPath'},
        'uri': {'key': 'uri', 'type': 'UriReference'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword dataset:
        :paramtype dataset: ~flow.models.RunDatasetReference
        :keyword data_path:
        :paramtype data_path: ~flow.models.ExecutionDataPath
        :keyword uri:
        :paramtype uri: ~flow.models.UriReference
        :keyword type:
        :paramtype type: str
        """
        super(ExecutionDataLocation, self).__init__(**kwargs)
        self.dataset = kwargs.get('dataset', None)
        self.data_path = kwargs.get('data_path', None)
        self.uri = kwargs.get('uri', None)
        self.type = kwargs.get('type', None)


class ExecutionDataPath(msrest.serialization.Model):
    """ExecutionDataPath.

    :ivar datastore_name:
    :vartype datastore_name: str
    :ivar relative_path:
    :vartype relative_path: str
    """

    _attribute_map = {
        'datastore_name': {'key': 'datastoreName', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword datastore_name:
        :paramtype datastore_name: str
        :keyword relative_path:
        :paramtype relative_path: str
        """
        super(ExecutionDataPath, self).__init__(**kwargs)
        self.datastore_name = kwargs.get('datastore_name', None)
        self.relative_path = kwargs.get('relative_path', None)


class ExecutionGlobsOptions(msrest.serialization.Model):
    """ExecutionGlobsOptions.

    :ivar glob_patterns:
    :vartype glob_patterns: list[str]
    """

    _attribute_map = {
        'glob_patterns': {'key': 'globPatterns', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword glob_patterns:
        :paramtype glob_patterns: list[str]
        """
        super(ExecutionGlobsOptions, self).__init__(**kwargs)
        self.glob_patterns = kwargs.get('glob_patterns', None)


class ExperimentComputeMetaInfo(msrest.serialization.Model):
    """ExperimentComputeMetaInfo.
    :ivar current_node_count:
    :vartype current_node_count: int
    :ivar target_node_count:
    :vartype target_node_count: int
    :ivar max_node_count:
    :vartype max_node_count: int
    :ivar min_node_count:
    :vartype min_node_count: int
    :ivar idle_node_count:
    :vartype idle_node_count: int
    :ivar running_node_count:
    :vartype running_node_count: int
    :ivar preparing_node_count:
    :vartype preparing_node_count: int
    :ivar unusable_node_count:
    :vartype unusable_node_count: int
    :ivar leaving_node_count:
    :vartype leaving_node_count: int
    :ivar preempted_node_count:
    :vartype preempted_node_count: int
    :ivar vm_size:
    :vartype vm_size: str
    :ivar location:
    :vartype location: str
    :ivar provisioning_state:
    :vartype provisioning_state: str
    :ivar state:
    :vartype state: str
    :ivar os_type:
    :vartype os_type: str
    :ivar id:
    :vartype id: str
    :ivar name:
    :vartype name: str
    :ivar created_by_studio:
    :vartype created_by_studio: bool
    :ivar is_gpu_type:
    :vartype is_gpu_type: bool
    :ivar resource_id:
    :vartype resource_id: str
    :ivar compute_type:
    :vartype compute_type: str
    """

    # Maps Python attribute -> REST wire key and msrest type string.
    _attribute_map = {
        'current_node_count': {'key': 'currentNodeCount', 'type': 'int'},
        'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
        'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
        'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
        'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
        'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
        'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
        'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
        'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
        'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'state': {'key': 'state', 'type': 'str'},
        'os_type': {'key': 'osType', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'created_by_studio': {'key': 'createdByStudio', 'type': 'bool'},
        'is_gpu_type': {'key': 'isGpuType', 'type': 'bool'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword current_node_count:
        :paramtype current_node_count: int
        :keyword target_node_count:
        :paramtype target_node_count: int
        :keyword max_node_count:
        :paramtype max_node_count: int
        :keyword min_node_count:
        :paramtype min_node_count: int
        :keyword idle_node_count:
        :paramtype idle_node_count: int
        :keyword running_node_count:
        :paramtype running_node_count: int
        :keyword preparing_node_count:
        :paramtype preparing_node_count: int
        :keyword unusable_node_count:
        :paramtype unusable_node_count: int
        :keyword leaving_node_count:
        :paramtype leaving_node_count: int
        :keyword preempted_node_count:
        :paramtype preempted_node_count: int
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword location:
        :paramtype location: str
        :keyword provisioning_state:
        :paramtype provisioning_state: str
        :keyword state:
        :paramtype state: str
        :keyword os_type:
        :paramtype os_type: str
        :keyword id:
        :paramtype id: str
        :keyword name:
        :paramtype name: str
        :keyword created_by_studio:
        :paramtype created_by_studio: bool
        :keyword is_gpu_type:
        :paramtype is_gpu_type: bool
        :keyword resource_id:
        :paramtype resource_id: str
        :keyword compute_type:
        :paramtype compute_type: str
        """
        super(ExperimentComputeMetaInfo, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.current_node_count = kwargs.get('current_node_count', None)
        self.target_node_count = kwargs.get('target_node_count', None)
        self.max_node_count = kwargs.get('max_node_count', None)
        self.min_node_count = kwargs.get('min_node_count', None)
        self.idle_node_count = kwargs.get('idle_node_count', None)
        self.running_node_count = kwargs.get('running_node_count', None)
        self.preparing_node_count = kwargs.get('preparing_node_count', None)
        self.unusable_node_count = kwargs.get('unusable_node_count', None)
        self.leaving_node_count = kwargs.get('leaving_node_count', None)
        self.preempted_node_count = kwargs.get('preempted_node_count', None)
        self.vm_size = kwargs.get('vm_size', None)
        self.location = kwargs.get('location', None)
        self.provisioning_state = kwargs.get('provisioning_state', None)
        self.state = kwargs.get('state', None)
        self.os_type = kwargs.get('os_type', None)
        self.id = kwargs.get('id', None)
        self.name = kwargs.get('name', None)
        self.created_by_studio = kwargs.get('created_by_studio', None)
        self.is_gpu_type = kwargs.get('is_gpu_type', None)
        self.resource_id = kwargs.get('resource_id', None)
        self.compute_type = kwargs.get('compute_type', None)


class ExperimentInfo(msrest.serialization.Model):
    """ExperimentInfo.

    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar experiment_id:
    :vartype experiment_id: str
    """

    _attribute_map = {
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword experiment_id:
        :paramtype experiment_id: str
        """
        super(ExperimentInfo, self).__init__(**kwargs)
        self.experiment_name = kwargs.get('experiment_name', None)
        self.experiment_id = kwargs.get('experiment_id', None)


class ExportComponentMetaInfo(msrest.serialization.Model):
    """ExportComponentMetaInfo.
    :ivar module_entity:
    :vartype module_entity: ~flow.models.ModuleEntity
    :ivar module_version:
    :vartype module_version: str
    :ivar is_anonymous:
    :vartype is_anonymous: bool
    """

    # Maps Python attribute -> REST wire key and msrest type string.
    _attribute_map = {
        'module_entity': {'key': 'moduleEntity', 'type': 'ModuleEntity'},
        'module_version': {'key': 'moduleVersion', 'type': 'str'},
        'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword module_entity:
        :paramtype module_entity: ~flow.models.ModuleEntity
        :keyword module_version:
        :paramtype module_version: str
        :keyword is_anonymous:
        :paramtype is_anonymous: bool
        """
        super(ExportComponentMetaInfo, self).__init__(**kwargs)
        self.module_entity = kwargs.get('module_entity', None)
        self.module_version = kwargs.get('module_version', None)
        self.is_anonymous = kwargs.get('is_anonymous', None)


class ExportDataTask(msrest.serialization.Model):
    """ExportDataTask.

    :ivar data_transfer_sink:
    :vartype data_transfer_sink: ~flow.models.DataTransferSink
    """

    # NOTE: wire key is PascalCase 'DataTransferSink', unlike the camelCase
    # keys used by most sibling models — preserved from the service contract.
    _attribute_map = {
        'data_transfer_sink': {'key': 'DataTransferSink', 'type': 'DataTransferSink'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_transfer_sink:
        :paramtype data_transfer_sink: ~flow.models.DataTransferSink
        """
        super(ExportDataTask, self).__init__(**kwargs)
        self.data_transfer_sink = kwargs.get('data_transfer_sink', None)


class FeaturizationSettings(msrest.serialization.Model):
    """FeaturizationSettings.

    :ivar mode: Possible values include: "Auto", "Custom", "Off".
    :vartype mode: str or ~flow.models.FeaturizationMode
    :ivar blocked_transformers:
    :vartype blocked_transformers: list[str]
    :ivar column_purposes: Dictionary of :code:`<string>`.
    :vartype column_purposes: dict[str, str]
    :ivar drop_columns:
    :vartype drop_columns: list[str]
    :ivar transformer_params: Dictionary of
     <components·1gi3krm·schemas·featurizationsettings·properties·transformerparams·additionalproperties>.
    :vartype transformer_params: dict[str, list[~flow.models.ColumnTransformer]]
    :ivar dataset_language:
    :vartype dataset_language: str
    :ivar enable_dnn_featurization:
    :vartype enable_dnn_featurization: bool
    """

    _attribute_map = {
        'mode': {'key': 'mode', 'type': 'str'},
        'blocked_transformers': {'key': 'blockedTransformers', 'type': '[str]'},
        'column_purposes': {'key': 'columnPurposes', 'type': '{str}'},
        'drop_columns': {'key': 'dropColumns', 'type': '[str]'},
        'transformer_params': {'key': 'transformerParams', 'type': '{[ColumnTransformer]}'},
        'dataset_language': {'key': 'datasetLanguage', 'type': 'str'},
        'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword mode: Possible values include: "Auto", "Custom", "Off".
        :paramtype mode: str or ~flow.models.FeaturizationMode
        :keyword blocked_transformers:
        :paramtype blocked_transformers: list[str]
        :keyword column_purposes: Dictionary of :code:`<string>`.
        :paramtype column_purposes: dict[str, str]
        :keyword drop_columns:
        :paramtype drop_columns: list[str]
        :keyword transformer_params: Dictionary of
         <components·1gi3krm·schemas·featurizationsettings·properties·transformerparams·additionalproperties>.
        :paramtype transformer_params: dict[str, list[~flow.models.ColumnTransformer]]
        :keyword dataset_language:
        :paramtype dataset_language: str
        :keyword enable_dnn_featurization:
        :paramtype enable_dnn_featurization: bool
        """
        super(FeaturizationSettings, self).__init__(**kwargs)
        self.mode = kwargs.get('mode', None)
        self.blocked_transformers = kwargs.get('blocked_transformers', None)
        self.column_purposes = kwargs.get('column_purposes', None)
        self.drop_columns = kwargs.get('drop_columns', None)
        self.transformer_params = kwargs.get('transformer_params', None)
        self.dataset_language = kwargs.get('dataset_language', None)
        self.enable_dnn_featurization = kwargs.get('enable_dnn_featurization', None)


class FeedDto(msrest.serialization.Model):
    """FeedDto.
    :ivar name:
    :vartype name: str
    :ivar display_name:
    :vartype display_name: str
    :ivar description:
    :vartype description: str
    :ivar sharing_scopes:
    :vartype sharing_scopes: list[~flow.models.SharingScope]
    :ivar supported_asset_types:
    :vartype supported_asset_types: ~flow.models.FeedDtoSupportedAssetTypes
    :ivar regional_workspace_storage: This is a dictionary.
    :vartype regional_workspace_storage: dict[str, list[str]]
    :ivar intellectual_property_publisher:
    :vartype intellectual_property_publisher: str
    """

    # Maps Python attribute -> REST wire key and msrest type string.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'sharing_scopes': {'key': 'sharingScopes', 'type': '[SharingScope]'},
        'supported_asset_types': {'key': 'supportedAssetTypes', 'type': 'FeedDtoSupportedAssetTypes'},
        'regional_workspace_storage': {'key': 'regionalWorkspaceStorage', 'type': '{[str]}'},
        'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword description:
        :paramtype description: str
        :keyword sharing_scopes:
        :paramtype sharing_scopes: list[~flow.models.SharingScope]
        :keyword supported_asset_types:
        :paramtype supported_asset_types: ~flow.models.FeedDtoSupportedAssetTypes
        :keyword regional_workspace_storage: This is a dictionary.
        :paramtype regional_workspace_storage: dict[str, list[str]]
        :keyword intellectual_property_publisher:
        :paramtype intellectual_property_publisher: str
        """
        super(FeedDto, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.display_name = kwargs.get('display_name', None)
        self.description = kwargs.get('description', None)
        self.sharing_scopes = kwargs.get('sharing_scopes', None)
        self.supported_asset_types = kwargs.get('supported_asset_types', None)
        self.regional_workspace_storage = kwargs.get('regional_workspace_storage', None)
        self.intellectual_property_publisher = kwargs.get('intellectual_property_publisher', None)


class FeedDtoSupportedAssetTypes(msrest.serialization.Model):
    """FeedDtoSupportedAssetTypes.

    :ivar component:
    :vartype component: ~flow.models.AssetTypeMetaInfo
    :ivar model:
    :vartype model: ~flow.models.AssetTypeMetaInfo
    :ivar environment:
    :vartype environment: ~flow.models.AssetTypeMetaInfo
    :ivar dataset:
    :vartype dataset: ~flow.models.AssetTypeMetaInfo
    :ivar data_store:
    :vartype data_store: ~flow.models.AssetTypeMetaInfo
    :ivar sample_graph:
    :vartype sample_graph: ~flow.models.AssetTypeMetaInfo
    :ivar flow_tool:
    :vartype flow_tool: ~flow.models.AssetTypeMetaInfo
    :ivar flow_tool_setting:
    :vartype flow_tool_setting: ~flow.models.AssetTypeMetaInfo
    :ivar flow_connection:
    :vartype flow_connection: ~flow.models.AssetTypeMetaInfo
    :ivar flow_sample:
    :vartype flow_sample: ~flow.models.AssetTypeMetaInfo
    :ivar flow_runtime_spec:
    :vartype flow_runtime_spec: ~flow.models.AssetTypeMetaInfo
    """

    # Wire keys here are PascalCase asset-type names from the service contract.
    _attribute_map = {
        'component': {'key': 'Component', 'type': 'AssetTypeMetaInfo'},
        'model': {'key': 'Model', 'type': 'AssetTypeMetaInfo'},
        'environment': {'key': 'Environment', 'type': 'AssetTypeMetaInfo'},
        'dataset': {'key': 'Dataset', 'type': 'AssetTypeMetaInfo'},
        'data_store': {'key': 'DataStore', 'type': 'AssetTypeMetaInfo'},
        'sample_graph': {'key': 'SampleGraph', 'type': 'AssetTypeMetaInfo'},
        'flow_tool': {'key': 'FlowTool', 'type': 'AssetTypeMetaInfo'},
        'flow_tool_setting': {'key': 'FlowToolSetting', 'type': 'AssetTypeMetaInfo'},
        'flow_connection': {'key': 'FlowConnection', 'type': 'AssetTypeMetaInfo'},
        'flow_sample': {'key': 'FlowSample', 'type': 'AssetTypeMetaInfo'},
        'flow_runtime_spec': {'key': 'FlowRuntimeSpec', 'type': 'AssetTypeMetaInfo'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword component:
        :paramtype component: ~flow.models.AssetTypeMetaInfo
        :keyword model:
        :paramtype model: ~flow.models.AssetTypeMetaInfo
        :keyword environment:
        :paramtype environment: ~flow.models.AssetTypeMetaInfo
        :keyword dataset:
        :paramtype dataset: ~flow.models.AssetTypeMetaInfo
        :keyword data_store:
        :paramtype data_store: ~flow.models.AssetTypeMetaInfo
        :keyword sample_graph:
        :paramtype sample_graph: ~flow.models.AssetTypeMetaInfo
        :keyword flow_tool:
        :paramtype flow_tool: ~flow.models.AssetTypeMetaInfo
        :keyword flow_tool_setting:
        :paramtype flow_tool_setting: ~flow.models.AssetTypeMetaInfo
        :keyword flow_connection:
        :paramtype flow_connection: ~flow.models.AssetTypeMetaInfo
        :keyword flow_sample:
        :paramtype flow_sample: ~flow.models.AssetTypeMetaInfo
        :keyword flow_runtime_spec:
        :paramtype flow_runtime_spec: ~flow.models.AssetTypeMetaInfo
        """
        super(FeedDtoSupportedAssetTypes, self).__init__(**kwargs)
        self.component = kwargs.get('component', None)
        self.model = kwargs.get('model', None)
        self.environment = kwargs.get('environment', None)
        self.dataset = kwargs.get('dataset', None)
        self.data_store = kwargs.get('data_store', None)
        self.sample_graph = kwargs.get('sample_graph', None)
        self.flow_tool = kwargs.get('flow_tool', None)
        self.flow_tool_setting = kwargs.get('flow_tool_setting', None)
        self.flow_connection = kwargs.get('flow_connection', None)
        self.flow_sample = kwargs.get('flow_sample', None)
        self.flow_runtime_spec = kwargs.get('flow_runtime_spec', None)


class FileSystem(msrest.serialization.Model):
    """FileSystem.
    :ivar connection:
    :vartype connection: str
    :ivar path:
    :vartype path: str
    """

    # Maps Python attribute -> REST wire key and msrest type string.
    _attribute_map = {
        'connection': {'key': 'connection', 'type': 'str'},
        'path': {'key': 'path', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword connection:
        :paramtype connection: str
        :keyword path:
        :paramtype path: str
        """
        super(FileSystem, self).__init__(**kwargs)
        self.connection = kwargs.get('connection', None)
        self.path = kwargs.get('path', None)


class Flow(msrest.serialization.Model):
    """Flow.

    :ivar source_resource_id:
    :vartype source_resource_id: str
    :ivar flow_graph:
    :vartype flow_graph: ~flow.models.FlowGraph
    :ivar node_variants: This is a dictionary.
    :vartype node_variants: dict[str, ~flow.models.NodeVariant]
    :ivar flow_graph_layout:
    :vartype flow_graph_layout: ~flow.models.FlowGraphLayout
    :ivar bulk_test_data: This is a dictionary.
    :vartype bulk_test_data: dict[str, str]
    :ivar evaluation_flows: This is a dictionary.
    :vartype evaluation_flows: dict[str, ~flow.models.FlowGraphReference]
    """

    _attribute_map = {
        'source_resource_id': {'key': 'sourceResourceId', 'type': 'str'},
        'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
        'node_variants': {'key': 'nodeVariants', 'type': '{NodeVariant}'},
        'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'},
        'bulk_test_data': {'key': 'bulkTestData', 'type': '{str}'},
        'evaluation_flows': {'key': 'evaluationFlows', 'type': '{FlowGraphReference}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword source_resource_id:
        :paramtype source_resource_id: str
        :keyword flow_graph:
        :paramtype flow_graph: ~flow.models.FlowGraph
        :keyword node_variants: This is a dictionary.
        :paramtype node_variants: dict[str, ~flow.models.NodeVariant]
        :keyword flow_graph_layout:
        :paramtype flow_graph_layout: ~flow.models.FlowGraphLayout
        :keyword bulk_test_data: This is a dictionary.
        :paramtype bulk_test_data: dict[str, str]
        :keyword evaluation_flows: This is a dictionary.
        :paramtype evaluation_flows: dict[str, ~flow.models.FlowGraphReference]
        """
        super(Flow, self).__init__(**kwargs)
        self.source_resource_id = kwargs.get('source_resource_id', None)
        self.flow_graph = kwargs.get('flow_graph', None)
        self.node_variants = kwargs.get('node_variants', None)
        self.flow_graph_layout = kwargs.get('flow_graph_layout', None)
        self.bulk_test_data = kwargs.get('bulk_test_data', None)
        self.evaluation_flows = kwargs.get('evaluation_flows', None)


class FlowAnnotations(msrest.serialization.Model):
    """FlowAnnotations.

    :ivar flow_name:
    :vartype flow_name: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    :ivar owner:
    :vartype owner: ~flow.models.SchemaContractsCreatedBy
    :ivar is_archived:
    :vartype is_archived: bool
    :ivar vm_size:
    :vartype vm_size: str
    :ivar max_idle_time_seconds:
    :vartype max_idle_time_seconds: long
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar archived:
    :vartype archived: bool
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    """

    # Maps Python attribute -> REST wire key and msrest type string.
    _attribute_map = {
        'flow_name': {'key': 'flowName', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
        'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
        'is_archived': {'key': 'isArchived', 'type': 'bool'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'archived': {'key': 'archived', 'type': 'bool'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword flow_name:
        :paramtype flow_name: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        :keyword owner:
        :paramtype owner: ~flow.models.SchemaContractsCreatedBy
        :keyword is_archived:
        :paramtype is_archived: bool
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword max_idle_time_seconds:
        :paramtype max_idle_time_seconds: long
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword archived:
        :paramtype archived: bool
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        """
        super(FlowAnnotations, self).__init__(**kwargs)
        self.flow_name = kwargs.get('flow_name', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)
        self.owner = kwargs.get('owner', None)
        self.is_archived = kwargs.get('is_archived', None)
        self.vm_size = kwargs.get('vm_size', None)
        self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.archived = kwargs.get('archived', None)
        self.tags = kwargs.get('tags', None)


class FlowBaseDto(msrest.serialization.Model):
    """FlowBaseDto.

    :ivar flow_id:
    :vartype flow_id: str
    :ivar flow_name:
    :vartype flow_name: str
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
    :vartype flow_type: str or ~flow.models.FlowType
    :ivar experiment_id:
    :vartype experiment_id: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    :ivar owner:
    :vartype owner: ~flow.models.SchemaContractsCreatedBy
    :ivar flow_resource_id:
    :vartype flow_resource_id: str
    :ivar is_archived:
    :vartype is_archived: bool
    :ivar flow_definition_file_path:
    :vartype flow_definition_file_path: str
    :ivar vm_size:
    :vartype vm_size: str
    :ivar max_idle_time_seconds:
    :vartype max_idle_time_seconds: long
    :ivar identity:
    :vartype identity: str
    """

    _attribute_map = {
        'flow_id': {'key': 'flowId', 'type': 'str'},
        'flow_name': {'key': 'flowName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'flow_type': {'key': 'flowType', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
        'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
        'flow_resource_id': {'key': 'flowResourceId', 'type': 'str'},
        'is_archived': {'key': 'isArchived', 'type': 'bool'},
        'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
        'identity': {'key': 'identity', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword flow_id:
        :paramtype flow_id: str
        :keyword flow_name:
        :paramtype flow_name: str
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
        :paramtype flow_type: str or ~flow.models.FlowType
        :keyword experiment_id:
        :paramtype experiment_id: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        :keyword owner:
        :paramtype owner: ~flow.models.SchemaContractsCreatedBy
        :keyword flow_resource_id:
        :paramtype flow_resource_id: str
        :keyword is_archived:
        :paramtype is_archived: bool
        :keyword flow_definition_file_path:
        :paramtype flow_definition_file_path: str
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword max_idle_time_seconds:
        :paramtype max_idle_time_seconds: long
        :keyword identity:
        :paramtype identity: str
        """
        super(FlowBaseDto, self).__init__(**kwargs)
        self.flow_id = kwargs.get('flow_id', None)
        self.flow_name = kwargs.get('flow_name', None)
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.flow_type = kwargs.get('flow_type', None)
        self.experiment_id = kwargs.get('experiment_id', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)
        self.owner = kwargs.get('owner', None)
        self.flow_resource_id = kwargs.get('flow_resource_id', None)
        self.is_archived = kwargs.get('is_archived', None)
        self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
        self.vm_size = kwargs.get('vm_size', None)
        self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
        self.identity = kwargs.get('identity', None)


class FlowDto(msrest.serialization.Model):
    """FlowDto.

    :ivar timestamp:
    :vartype timestamp: ~datetime.datetime
    :ivar e_tag: Any object.
    :vartype e_tag: any
    :ivar flow:
    :vartype flow: ~flow.models.Flow
    :ivar flow_run_settings:
    :vartype flow_run_settings: ~flow.models.FlowRunSettings
    :ivar flow_run_result:
    :vartype flow_run_result: ~flow.models.FlowRunResult
    :ivar flow_test_mode: Possible values include: "Sync", "Async".
    :vartype flow_test_mode: str or ~flow.models.FlowTestMode
    :ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
    :vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
    :ivar studio_portal_endpoint:
    :vartype studio_portal_endpoint: str
    :ivar flow_id:
    :vartype flow_id: str
    :ivar flow_name:
    :vartype flow_name: str
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
    :vartype flow_type: str or ~flow.models.FlowType
    :ivar experiment_id:
    :vartype experiment_id: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    :ivar owner:
    :vartype owner: ~flow.models.SchemaContractsCreatedBy
    :ivar flow_resource_id:
    :vartype flow_resource_id: str
    :ivar is_archived:
    :vartype is_archived: bool
    :ivar flow_definition_file_path:
    :vartype flow_definition_file_path: str
    :ivar vm_size:
    :vartype vm_size: str
    :ivar max_idle_time_seconds:
    :vartype max_idle_time_seconds: long
    :ivar identity:
    :vartype identity: str
    """

    # Maps Python attribute -> REST wire key and msrest type string.
    _attribute_map = {
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'e_tag': {'key': 'eTag', 'type': 'object'},
        'flow': {'key': 'flow', 'type': 'Flow'},
        'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'},
        'flow_run_result': {'key': 'flowRunResult', 'type': 'FlowRunResult'},
        'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'},
        'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
        'studio_portal_endpoint': {'key': 'studioPortalEndpoint', 'type': 'str'},
        'flow_id': {'key': 'flowId', 'type': 'str'},
        'flow_name': {'key': 'flowName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'flow_type': {'key': 'flowType', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
        'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
        'flow_resource_id': {'key': 'flowResourceId', 'type': 'str'},
        'is_archived': {'key': 'isArchived', 'type': 'bool'},
        'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
        'identity': {'key': 'identity', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword timestamp:
        :paramtype timestamp: ~datetime.datetime
        :keyword e_tag: Any object.
        :paramtype e_tag: any
        :keyword flow:
        :paramtype flow: ~flow.models.Flow
        :keyword flow_run_settings:
        :paramtype flow_run_settings: ~flow.models.FlowRunSettings
        :keyword flow_run_result:
        :paramtype flow_run_result: ~flow.models.FlowRunResult
        :keyword flow_test_mode: Possible values include: "Sync", "Async".
        :paramtype flow_test_mode: str or ~flow.models.FlowTestMode
        :keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
        :paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
        :keyword studio_portal_endpoint:
        :paramtype studio_portal_endpoint: str
        :keyword flow_id:
        :paramtype flow_id: str
        :keyword flow_name:
        :paramtype flow_name: str
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
        :paramtype flow_type: str or ~flow.models.FlowType
        :keyword experiment_id:
        :paramtype experiment_id: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        :keyword owner:
        :paramtype owner: ~flow.models.SchemaContractsCreatedBy
        :keyword flow_resource_id:
        :paramtype flow_resource_id: str
        :keyword is_archived:
        :paramtype is_archived: bool
        :keyword flow_definition_file_path:
        :paramtype flow_definition_file_path: str
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword max_idle_time_seconds:
        :paramtype max_idle_time_seconds: long
        :keyword identity:
        :paramtype identity: str
        """
        super(FlowDto, self).__init__(**kwargs)
        self.timestamp = kwargs.get('timestamp', None)
        self.e_tag = kwargs.get('e_tag', None)
        self.flow = kwargs.get('flow', None)
        self.flow_run_settings = kwargs.get('flow_run_settings', None)
        self.flow_run_result = kwargs.get('flow_run_result', None)
        self.flow_test_mode = kwargs.get('flow_test_mode', None)
        self.flow_test_infos = kwargs.get('flow_test_infos', None)
        self.studio_portal_endpoint = kwargs.get('studio_portal_endpoint', None)
        self.flow_id = kwargs.get('flow_id', None)
        self.flow_name = kwargs.get('flow_name', None)
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.flow_type = kwargs.get('flow_type', None)
        self.experiment_id = kwargs.get('experiment_id', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)
        self.owner = kwargs.get('owner', None)
        self.flow_resource_id = kwargs.get('flow_resource_id', None)
        self.is_archived = kwargs.get('is_archived', None)
        self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
        self.vm_size = kwargs.get('vm_size', None)
        self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
        self.identity = kwargs.get('identity', None)


class FlowEnvironment(msrest.serialization.Model):
"""FlowEnvironment. :ivar image: :vartype image: str :ivar python_requirements_txt: :vartype python_requirements_txt: str """ _attribute_map = { 'image': {'key': 'image', 'type': 'str'}, 'python_requirements_txt': {'key': 'python_requirements_txt', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword image: :paramtype image: str :keyword python_requirements_txt: :paramtype python_requirements_txt: str """ super(FlowEnvironment, self).__init__(**kwargs) self.image = kwargs.get('image', None) self.python_requirements_txt = kwargs.get('python_requirements_txt', None) class FlowFeature(msrest.serialization.Model): """FlowFeature. :ivar name: :vartype name: str :ivar description: :vartype description: str :ivar state: :vartype state: ~flow.models.FlowFeatureState """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'state': {'key': 'state', 'type': 'FlowFeatureState'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword description: :paramtype description: str :keyword state: :paramtype state: ~flow.models.FlowFeatureState """ super(FlowFeature, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.description = kwargs.get('description', None) self.state = kwargs.get('state', None) class FlowFeatureState(msrest.serialization.Model): """FlowFeatureState. :ivar runtime: Possible values include: "Ready", "E2ETest". :vartype runtime: str or ~flow.models.FlowFeatureStateEnum :ivar executor: Possible values include: "Ready", "E2ETest". :vartype executor: str or ~flow.models.FlowFeatureStateEnum :ivar pfs: Possible values include: "Ready", "E2ETest". :vartype pfs: str or ~flow.models.FlowFeatureStateEnum """ _attribute_map = { 'runtime': {'key': 'Runtime', 'type': 'str'}, 'executor': {'key': 'Executor', 'type': 'str'}, 'pfs': {'key': 'PFS', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword runtime: Possible values include: "Ready", "E2ETest". 
:paramtype runtime: str or ~flow.models.FlowFeatureStateEnum :keyword executor: Possible values include: "Ready", "E2ETest". :paramtype executor: str or ~flow.models.FlowFeatureStateEnum :keyword pfs: Possible values include: "Ready", "E2ETest". :paramtype pfs: str or ~flow.models.FlowFeatureStateEnum """ super(FlowFeatureState, self).__init__(**kwargs) self.runtime = kwargs.get('runtime', None) self.executor = kwargs.get('executor', None) self.pfs = kwargs.get('pfs', None) class FlowGraph(msrest.serialization.Model): """FlowGraph. :ivar nodes: :vartype nodes: list[~flow.models.Node] :ivar tools: :vartype tools: list[~flow.models.Tool] :ivar codes: This is a dictionary. :vartype codes: dict[str, str] :ivar inputs: This is a dictionary. :vartype inputs: dict[str, ~flow.models.FlowInputDefinition] :ivar outputs: This is a dictionary. :vartype outputs: dict[str, ~flow.models.FlowOutputDefinition] """ _attribute_map = { 'nodes': {'key': 'nodes', 'type': '[Node]'}, 'tools': {'key': 'tools', 'type': '[Tool]'}, 'codes': {'key': 'codes', 'type': '{str}'}, 'inputs': {'key': 'inputs', 'type': '{FlowInputDefinition}'}, 'outputs': {'key': 'outputs', 'type': '{FlowOutputDefinition}'}, } def __init__( self, **kwargs ): """ :keyword nodes: :paramtype nodes: list[~flow.models.Node] :keyword tools: :paramtype tools: list[~flow.models.Tool] :keyword codes: This is a dictionary. :paramtype codes: dict[str, str] :keyword inputs: This is a dictionary. :paramtype inputs: dict[str, ~flow.models.FlowInputDefinition] :keyword outputs: This is a dictionary. :paramtype outputs: dict[str, ~flow.models.FlowOutputDefinition] """ super(FlowGraph, self).__init__(**kwargs) self.nodes = kwargs.get('nodes', None) self.tools = kwargs.get('tools', None) self.codes = kwargs.get('codes', None) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) class FlowGraphAnnotationNode(msrest.serialization.Model): """FlowGraphAnnotationNode. 
:ivar id: :vartype id: str :ivar content: :vartype content: str :ivar mentioned_node_names: :vartype mentioned_node_names: list[str] :ivar structured_content: :vartype structured_content: str """ _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'content': {'key': 'content', 'type': 'str'}, 'mentioned_node_names': {'key': 'mentionedNodeNames', 'type': '[str]'}, 'structured_content': {'key': 'structuredContent', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword id: :paramtype id: str :keyword content: :paramtype content: str :keyword mentioned_node_names: :paramtype mentioned_node_names: list[str] :keyword structured_content: :paramtype structured_content: str """ super(FlowGraphAnnotationNode, self).__init__(**kwargs) self.id = kwargs.get('id', None) self.content = kwargs.get('content', None) self.mentioned_node_names = kwargs.get('mentioned_node_names', None) self.structured_content = kwargs.get('structured_content', None) class FlowGraphLayout(msrest.serialization.Model): """FlowGraphLayout. :ivar node_layouts: This is a dictionary. :vartype node_layouts: dict[str, ~flow.models.FlowNodeLayout] :ivar extended_data: :vartype extended_data: str :ivar annotation_nodes: :vartype annotation_nodes: list[~flow.models.FlowGraphAnnotationNode] :ivar orientation: Possible values include: "Horizontal", "Vertical". :vartype orientation: str or ~flow.models.Orientation """ _attribute_map = { 'node_layouts': {'key': 'nodeLayouts', 'type': '{FlowNodeLayout}'}, 'extended_data': {'key': 'extendedData', 'type': 'str'}, 'annotation_nodes': {'key': 'annotationNodes', 'type': '[FlowGraphAnnotationNode]'}, 'orientation': {'key': 'orientation', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword node_layouts: This is a dictionary. 
:paramtype node_layouts: dict[str, ~flow.models.FlowNodeLayout] :keyword extended_data: :paramtype extended_data: str :keyword annotation_nodes: :paramtype annotation_nodes: list[~flow.models.FlowGraphAnnotationNode] :keyword orientation: Possible values include: "Horizontal", "Vertical". :paramtype orientation: str or ~flow.models.Orientation """ super(FlowGraphLayout, self).__init__(**kwargs) self.node_layouts = kwargs.get('node_layouts', None) self.extended_data = kwargs.get('extended_data', None) self.annotation_nodes = kwargs.get('annotation_nodes', None) self.orientation = kwargs.get('orientation', None) class FlowGraphReference(msrest.serialization.Model): """FlowGraphReference. :ivar flow_graph: :vartype flow_graph: ~flow.models.FlowGraph :ivar reference_resource_id: :vartype reference_resource_id: str """ _attribute_map = { 'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'}, 'reference_resource_id': {'key': 'referenceResourceId', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword flow_graph: :paramtype flow_graph: ~flow.models.FlowGraph :keyword reference_resource_id: :paramtype reference_resource_id: str """ super(FlowGraphReference, self).__init__(**kwargs) self.flow_graph = kwargs.get('flow_graph', None) self.reference_resource_id = kwargs.get('reference_resource_id', None) class FlowIndexEntity(msrest.serialization.Model): """FlowIndexEntity. Variables are only populated by the server, and will be ignored when sending a request. :ivar schema_id: :vartype schema_id: str :ivar entity_id: :vartype entity_id: str :ivar kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned". :vartype kind: str or ~flow.models.EntityKind :ivar annotations: :vartype annotations: ~flow.models.FlowAnnotations :ivar properties: :vartype properties: ~flow.models.FlowProperties :ivar internal: Any object. 
:vartype internal: any :ivar update_sequence: :vartype update_sequence: long :ivar type: :vartype type: str :ivar version: :vartype version: str :ivar entity_container_id: :vartype entity_container_id: str :ivar entity_object_id: :vartype entity_object_id: str :ivar resource_type: :vartype resource_type: str :ivar relationships: :vartype relationships: list[~flow.models.Relationship] :ivar asset_id: :vartype asset_id: str """ _validation = { 'version': {'readonly': True}, 'entity_container_id': {'readonly': True}, 'entity_object_id': {'readonly': True}, 'resource_type': {'readonly': True}, } _attribute_map = { 'schema_id': {'key': 'schemaId', 'type': 'str'}, 'entity_id': {'key': 'entityId', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'annotations': {'key': 'annotations', 'type': 'FlowAnnotations'}, 'properties': {'key': 'properties', 'type': 'FlowProperties'}, 'internal': {'key': 'internal', 'type': 'object'}, 'update_sequence': {'key': 'updateSequence', 'type': 'long'}, 'type': {'key': 'type', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, 'entity_container_id': {'key': 'entityContainerId', 'type': 'str'}, 'entity_object_id': {'key': 'entityObjectId', 'type': 'str'}, 'resource_type': {'key': 'resourceType', 'type': 'str'}, 'relationships': {'key': 'relationships', 'type': '[Relationship]'}, 'asset_id': {'key': 'assetId', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword schema_id: :paramtype schema_id: str :keyword entity_id: :paramtype entity_id: str :keyword kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned". :paramtype kind: str or ~flow.models.EntityKind :keyword annotations: :paramtype annotations: ~flow.models.FlowAnnotations :keyword properties: :paramtype properties: ~flow.models.FlowProperties :keyword internal: Any object. 
:paramtype internal: any :keyword update_sequence: :paramtype update_sequence: long :keyword type: :paramtype type: str :keyword relationships: :paramtype relationships: list[~flow.models.Relationship] :keyword asset_id: :paramtype asset_id: str """ super(FlowIndexEntity, self).__init__(**kwargs) self.schema_id = kwargs.get('schema_id', None) self.entity_id = kwargs.get('entity_id', None) self.kind = kwargs.get('kind', None) self.annotations = kwargs.get('annotations', None) self.properties = kwargs.get('properties', None) self.internal = kwargs.get('internal', None) self.update_sequence = kwargs.get('update_sequence', None) self.type = kwargs.get('type', None) self.version = None self.entity_container_id = None self.entity_object_id = None self.resource_type = None self.relationships = kwargs.get('relationships', None) self.asset_id = kwargs.get('asset_id', None) class FlowInputDefinition(msrest.serialization.Model): """FlowInputDefinition. :ivar name: :vartype name: str :ivar type: Possible values include: "int", "double", "bool", "string", "secret", "prompt_template", "object", "list", "BingConnection", "OpenAIConnection", "AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection", "AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection", "SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection", "function_list", "function_str", "FormRecognizerConnection", "file_path", "image". :vartype type: str or ~flow.models.ValueType :ivar default: Anything. 
:vartype default: any :ivar description: :vartype description: str :ivar is_chat_input: :vartype is_chat_input: bool :ivar is_chat_history: :vartype is_chat_history: bool """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'default': {'key': 'default', 'type': 'object'}, 'description': {'key': 'description', 'type': 'str'}, 'is_chat_input': {'key': 'is_chat_input', 'type': 'bool'}, 'is_chat_history': {'key': 'is_chat_history', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword type: Possible values include: "int", "double", "bool", "string", "secret", "prompt_template", "object", "list", "BingConnection", "OpenAIConnection", "AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection", "AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection", "SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection", "function_list", "function_str", "FormRecognizerConnection", "file_path", "image". :paramtype type: str or ~flow.models.ValueType :keyword default: Anything. :paramtype default: any :keyword description: :paramtype description: str :keyword is_chat_input: :paramtype is_chat_input: bool :keyword is_chat_history: :paramtype is_chat_history: bool """ super(FlowInputDefinition, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.type = kwargs.get('type', None) self.default = kwargs.get('default', None) self.description = kwargs.get('description', None) self.is_chat_input = kwargs.get('is_chat_input', None) self.is_chat_history = kwargs.get('is_chat_history', None) class FlowNode(msrest.serialization.Model): """FlowNode. :ivar name: :vartype name: str :ivar type: Possible values include: "llm", "python", "action", "prompt", "custom_llm", "csharp". :vartype type: str or ~flow.models.ToolType :ivar source: :vartype source: ~flow.models.NodeSource :ivar inputs: Dictionary of :code:`<any>`. 
:vartype inputs: dict[str, any] :ivar use_variants: :vartype use_variants: bool :ivar activate: :vartype activate: ~flow.models.Activate :ivar comment: :vartype comment: str :ivar api: :vartype api: str :ivar provider: :vartype provider: str :ivar connection: :vartype connection: str :ivar module: :vartype module: str :ivar aggregation: :vartype aggregation: bool """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'source': {'key': 'source', 'type': 'NodeSource'}, 'inputs': {'key': 'inputs', 'type': '{object}'}, 'use_variants': {'key': 'use_variants', 'type': 'bool'}, 'activate': {'key': 'activate', 'type': 'Activate'}, 'comment': {'key': 'comment', 'type': 'str'}, 'api': {'key': 'api', 'type': 'str'}, 'provider': {'key': 'provider', 'type': 'str'}, 'connection': {'key': 'connection', 'type': 'str'}, 'module': {'key': 'module', 'type': 'str'}, 'aggregation': {'key': 'aggregation', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword type: Possible values include: "llm", "python", "action", "prompt", "custom_llm", "csharp". :paramtype type: str or ~flow.models.ToolType :keyword source: :paramtype source: ~flow.models.NodeSource :keyword inputs: Dictionary of :code:`<any>`. 
:paramtype inputs: dict[str, any] :keyword use_variants: :paramtype use_variants: bool :keyword activate: :paramtype activate: ~flow.models.Activate :keyword comment: :paramtype comment: str :keyword api: :paramtype api: str :keyword provider: :paramtype provider: str :keyword connection: :paramtype connection: str :keyword module: :paramtype module: str :keyword aggregation: :paramtype aggregation: bool """ super(FlowNode, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.type = kwargs.get('type', None) self.source = kwargs.get('source', None) self.inputs = kwargs.get('inputs', None) self.use_variants = kwargs.get('use_variants', None) self.activate = kwargs.get('activate', None) self.comment = kwargs.get('comment', None) self.api = kwargs.get('api', None) self.provider = kwargs.get('provider', None) self.connection = kwargs.get('connection', None) self.module = kwargs.get('module', None) self.aggregation = kwargs.get('aggregation', None) class FlowNodeLayout(msrest.serialization.Model): """FlowNodeLayout. 
    :ivar x:
    :vartype x: float
    :ivar y:
    :vartype y: float
    :ivar width:
    :vartype width: float
    :ivar height:
    :vartype height: float
    :ivar index:
    :vartype index: int
    :ivar extended_data:
    :vartype extended_data: str
    """

    _attribute_map = {
        'x': {'key': 'x', 'type': 'float'},
        'y': {'key': 'y', 'type': 'float'},
        'width': {'key': 'width', 'type': 'float'},
        'height': {'key': 'height', 'type': 'float'},
        'index': {'key': 'index', 'type': 'int'},
        'extended_data': {'key': 'extendedData', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword x:
        :paramtype x: float
        :keyword y:
        :paramtype y: float
        :keyword width:
        :paramtype width: float
        :keyword height:
        :paramtype height: float
        :keyword index:
        :paramtype index: int
        :keyword extended_data:
        :paramtype extended_data: str
        """
        super(FlowNodeLayout, self).__init__(**kwargs)
        self.x = kwargs.get('x', None)
        self.y = kwargs.get('y', None)
        self.width = kwargs.get('width', None)
        self.height = kwargs.get('height', None)
        self.index = kwargs.get('index', None)
        self.extended_data = kwargs.get('extended_data', None)


class FlowNodeVariant(msrest.serialization.Model):
    """FlowNodeVariant.

    :ivar default_variant_id:
    :vartype default_variant_id: str
    :ivar variants: This is a dictionary.
    :vartype variants: dict[str, ~flow.models.FlowVariantNode]
    """

    # NOTE(review): auto-generated by AutoRest — edits here are lost on regeneration.
    # Wire keys are snake_case here (vs. camelCase in FlowNodeLayout above); this
    # presumably mirrors the flow-DAG YAML field names — confirm against the contract.
    _attribute_map = {
        'default_variant_id': {'key': 'default_variant_id', 'type': 'str'},
        'variants': {'key': 'variants', 'type': '{FlowVariantNode}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword default_variant_id:
        :paramtype default_variant_id: str
        :keyword variants: This is a dictionary.
        :paramtype variants: dict[str, ~flow.models.FlowVariantNode]
        """
        super(FlowNodeVariant, self).__init__(**kwargs)
        self.default_variant_id = kwargs.get('default_variant_id', None)
        self.variants = kwargs.get('variants', None)


class FlowOutputDefinition(msrest.serialization.Model):
    """FlowOutputDefinition.
:ivar name: :vartype name: str :ivar type: Possible values include: "int", "double", "bool", "string", "secret", "prompt_template", "object", "list", "BingConnection", "OpenAIConnection", "AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection", "AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection", "SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection", "function_list", "function_str", "FormRecognizerConnection", "file_path", "image". :vartype type: str or ~flow.models.ValueType :ivar description: :vartype description: str :ivar reference: :vartype reference: str :ivar evaluation_only: :vartype evaluation_only: bool :ivar is_chat_output: :vartype is_chat_output: bool """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'reference': {'key': 'reference', 'type': 'str'}, 'evaluation_only': {'key': 'evaluation_only', 'type': 'bool'}, 'is_chat_output': {'key': 'is_chat_output', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword type: Possible values include: "int", "double", "bool", "string", "secret", "prompt_template", "object", "list", "BingConnection", "OpenAIConnection", "AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection", "AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection", "SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection", "function_list", "function_str", "FormRecognizerConnection", "file_path", "image". 
:paramtype type: str or ~flow.models.ValueType :keyword description: :paramtype description: str :keyword reference: :paramtype reference: str :keyword evaluation_only: :paramtype evaluation_only: bool :keyword is_chat_output: :paramtype is_chat_output: bool """ super(FlowOutputDefinition, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.type = kwargs.get('type', None) self.description = kwargs.get('description', None) self.reference = kwargs.get('reference', None) self.evaluation_only = kwargs.get('evaluation_only', None) self.is_chat_output = kwargs.get('is_chat_output', None) class FlowProperties(msrest.serialization.Model): """FlowProperties. :ivar flow_id: :vartype flow_id: str :ivar experiment_id: :vartype experiment_id: str :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :vartype flow_type: str or ~flow.models.FlowType :ivar flow_definition_file_path: :vartype flow_definition_file_path: str :ivar creation_context: :vartype creation_context: ~flow.models.CreationContext """ _attribute_map = { 'flow_id': {'key': 'flowId', 'type': 'str'}, 'experiment_id': {'key': 'experimentId', 'type': 'str'}, 'flow_type': {'key': 'flowType', 'type': 'str'}, 'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'}, 'creation_context': {'key': 'creationContext', 'type': 'CreationContext'}, } def __init__( self, **kwargs ): """ :keyword flow_id: :paramtype flow_id: str :keyword experiment_id: :paramtype experiment_id: str :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". 
:paramtype flow_type: str or ~flow.models.FlowType :keyword flow_definition_file_path: :paramtype flow_definition_file_path: str :keyword creation_context: :paramtype creation_context: ~flow.models.CreationContext """ super(FlowProperties, self).__init__(**kwargs) self.flow_id = kwargs.get('flow_id', None) self.experiment_id = kwargs.get('experiment_id', None) self.flow_type = kwargs.get('flow_type', None) self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None) self.creation_context = kwargs.get('creation_context', None) class FlowRunBasePath(msrest.serialization.Model): """FlowRunBasePath. :ivar output_datastore_name: :vartype output_datastore_name: str :ivar base_path: :vartype base_path: str """ _attribute_map = { 'output_datastore_name': {'key': 'outputDatastoreName', 'type': 'str'}, 'base_path': {'key': 'basePath', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword output_datastore_name: :paramtype output_datastore_name: str :keyword base_path: :paramtype base_path: str """ super(FlowRunBasePath, self).__init__(**kwargs) self.output_datastore_name = kwargs.get('output_datastore_name', None) self.base_path = kwargs.get('base_path', None) class FlowRunInfo(msrest.serialization.Model): """FlowRunInfo. :ivar flow_graph: :vartype flow_graph: ~flow.models.FlowGraph :ivar flow_graph_layout: :vartype flow_graph_layout: ~flow.models.FlowGraphLayout :ivar flow_name: :vartype flow_name: str :ivar flow_run_resource_id: :vartype flow_run_resource_id: str :ivar flow_run_id: :vartype flow_run_id: str :ivar flow_run_display_name: :vartype flow_run_display_name: str :ivar batch_inputs: :vartype batch_inputs: list[dict[str, any]] :ivar batch_data_input: :vartype batch_data_input: ~flow.models.BatchDataInput :ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun", "PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun". 
:vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :vartype flow_type: str or ~flow.models.FlowType :ivar runtime_name: :vartype runtime_name: str :ivar bulk_test_id: :vartype bulk_test_id: str :ivar created_by: :vartype created_by: ~flow.models.SchemaContractsCreatedBy :ivar created_on: :vartype created_on: ~datetime.datetime :ivar inputs_mapping: This is a dictionary. :vartype inputs_mapping: dict[str, str] :ivar output_datastore_name: :vartype output_datastore_name: str :ivar child_run_base_path: :vartype child_run_base_path: str :ivar working_directory: :vartype working_directory: str :ivar flow_dag_file_relative_path: :vartype flow_dag_file_relative_path: str :ivar flow_snapshot_id: :vartype flow_snapshot_id: str :ivar studio_portal_endpoint: :vartype studio_portal_endpoint: str """ _attribute_map = { 'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'}, 'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'}, 'flow_name': {'key': 'flowName', 'type': 'str'}, 'flow_run_resource_id': {'key': 'flowRunResourceId', 'type': 'str'}, 'flow_run_id': {'key': 'flowRunId', 'type': 'str'}, 'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'}, 'batch_inputs': {'key': 'batchInputs', 'type': '[{object}]'}, 'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'}, 'flow_run_type': {'key': 'flowRunType', 'type': 'str'}, 'flow_type': {'key': 'flowType', 'type': 'str'}, 'runtime_name': {'key': 'runtimeName', 'type': 'str'}, 'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'}, 'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'}, 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'}, 'output_datastore_name': {'key': 'outputDatastoreName', 'type': 'str'}, 'child_run_base_path': {'key': 'childRunBasePath', 'type': 'str'}, 'working_directory': {'key': 
'workingDirectory', 'type': 'str'}, 'flow_dag_file_relative_path': {'key': 'flowDagFileRelativePath', 'type': 'str'}, 'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'}, 'studio_portal_endpoint': {'key': 'studioPortalEndpoint', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword flow_graph: :paramtype flow_graph: ~flow.models.FlowGraph :keyword flow_graph_layout: :paramtype flow_graph_layout: ~flow.models.FlowGraphLayout :keyword flow_name: :paramtype flow_name: str :keyword flow_run_resource_id: :paramtype flow_run_resource_id: str :keyword flow_run_id: :paramtype flow_run_id: str :keyword flow_run_display_name: :paramtype flow_run_display_name: str :keyword batch_inputs: :paramtype batch_inputs: list[dict[str, any]] :keyword batch_data_input: :paramtype batch_data_input: ~flow.models.BatchDataInput :keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun", "PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun". :paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :paramtype flow_type: str or ~flow.models.FlowType :keyword runtime_name: :paramtype runtime_name: str :keyword bulk_test_id: :paramtype bulk_test_id: str :keyword created_by: :paramtype created_by: ~flow.models.SchemaContractsCreatedBy :keyword created_on: :paramtype created_on: ~datetime.datetime :keyword inputs_mapping: This is a dictionary. 
:paramtype inputs_mapping: dict[str, str] :keyword output_datastore_name: :paramtype output_datastore_name: str :keyword child_run_base_path: :paramtype child_run_base_path: str :keyword working_directory: :paramtype working_directory: str :keyword flow_dag_file_relative_path: :paramtype flow_dag_file_relative_path: str :keyword flow_snapshot_id: :paramtype flow_snapshot_id: str :keyword studio_portal_endpoint: :paramtype studio_portal_endpoint: str """ super(FlowRunInfo, self).__init__(**kwargs) self.flow_graph = kwargs.get('flow_graph', None) self.flow_graph_layout = kwargs.get('flow_graph_layout', None) self.flow_name = kwargs.get('flow_name', None) self.flow_run_resource_id = kwargs.get('flow_run_resource_id', None) self.flow_run_id = kwargs.get('flow_run_id', None) self.flow_run_display_name = kwargs.get('flow_run_display_name', None) self.batch_inputs = kwargs.get('batch_inputs', None) self.batch_data_input = kwargs.get('batch_data_input', None) self.flow_run_type = kwargs.get('flow_run_type', None) self.flow_type = kwargs.get('flow_type', None) self.runtime_name = kwargs.get('runtime_name', None) self.bulk_test_id = kwargs.get('bulk_test_id', None) self.created_by = kwargs.get('created_by', None) self.created_on = kwargs.get('created_on', None) self.inputs_mapping = kwargs.get('inputs_mapping', None) self.output_datastore_name = kwargs.get('output_datastore_name', None) self.child_run_base_path = kwargs.get('child_run_base_path', None) self.working_directory = kwargs.get('working_directory', None) self.flow_dag_file_relative_path = kwargs.get('flow_dag_file_relative_path', None) self.flow_snapshot_id = kwargs.get('flow_snapshot_id', None) self.studio_portal_endpoint = kwargs.get('studio_portal_endpoint', None) class FlowRunResult(msrest.serialization.Model): """FlowRunResult. :ivar flow_runs: :vartype flow_runs: list[any] :ivar node_runs: :vartype node_runs: list[any] :ivar error_response: The error response. 
:vartype error_response: ~flow.models.ErrorResponse :ivar flow_name: :vartype flow_name: str :ivar flow_run_display_name: :vartype flow_run_display_name: str :ivar flow_run_id: :vartype flow_run_id: str :ivar flow_graph: :vartype flow_graph: ~flow.models.FlowGraph :ivar flow_graph_layout: :vartype flow_graph_layout: ~flow.models.FlowGraphLayout :ivar flow_run_resource_id: :vartype flow_run_resource_id: str :ivar bulk_test_id: :vartype bulk_test_id: str :ivar batch_inputs: :vartype batch_inputs: list[dict[str, any]] :ivar batch_data_input: :vartype batch_data_input: ~flow.models.BatchDataInput :ivar created_by: :vartype created_by: ~flow.models.SchemaContractsCreatedBy :ivar created_on: :vartype created_on: ~datetime.datetime :ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun", "PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun". :vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :vartype flow_type: str or ~flow.models.FlowType :ivar runtime_name: :vartype runtime_name: str :ivar aml_compute_name: :vartype aml_compute_name: str :ivar flow_run_logs: Dictionary of :code:`<string>`. :vartype flow_run_logs: dict[str, str] :ivar flow_test_mode: Possible values include: "Sync", "Async". :vartype flow_test_mode: str or ~flow.models.FlowTestMode :ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`. :vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo] :ivar working_directory: :vartype working_directory: str :ivar flow_dag_file_relative_path: :vartype flow_dag_file_relative_path: str :ivar flow_snapshot_id: :vartype flow_snapshot_id: str :ivar variant_run_to_evaluation_runs_id_mapping: Dictionary of <componentsΒ·1k1eaegΒ·schemasΒ·flowrunresultΒ·propertiesΒ·variantruntoevaluationrunsidmappingΒ·additionalproperties>. 
:vartype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]] """ _attribute_map = { 'flow_runs': {'key': 'flow_runs', 'type': '[object]'}, 'node_runs': {'key': 'node_runs', 'type': '[object]'}, 'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'}, 'flow_name': {'key': 'flowName', 'type': 'str'}, 'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'}, 'flow_run_id': {'key': 'flowRunId', 'type': 'str'}, 'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'}, 'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'}, 'flow_run_resource_id': {'key': 'flowRunResourceId', 'type': 'str'}, 'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'}, 'batch_inputs': {'key': 'batchInputs', 'type': '[{object}]'}, 'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'}, 'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'}, 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'flow_run_type': {'key': 'flowRunType', 'type': 'str'}, 'flow_type': {'key': 'flowType', 'type': 'str'}, 'runtime_name': {'key': 'runtimeName', 'type': 'str'}, 'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'}, 'flow_run_logs': {'key': 'flowRunLogs', 'type': '{str}'}, 'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'}, 'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'}, 'working_directory': {'key': 'workingDirectory', 'type': 'str'}, 'flow_dag_file_relative_path': {'key': 'flowDagFileRelativePath', 'type': 'str'}, 'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'}, 'variant_run_to_evaluation_runs_id_mapping': {'key': 'variantRunToEvaluationRunsIdMapping', 'type': '{[str]}'}, } def __init__( self, **kwargs ): """ :keyword flow_runs: :paramtype flow_runs: list[any] :keyword node_runs: :paramtype node_runs: list[any] :keyword error_response: The error response. 
:paramtype error_response: ~flow.models.ErrorResponse :keyword flow_name: :paramtype flow_name: str :keyword flow_run_display_name: :paramtype flow_run_display_name: str :keyword flow_run_id: :paramtype flow_run_id: str :keyword flow_graph: :paramtype flow_graph: ~flow.models.FlowGraph :keyword flow_graph_layout: :paramtype flow_graph_layout: ~flow.models.FlowGraphLayout :keyword flow_run_resource_id: :paramtype flow_run_resource_id: str :keyword bulk_test_id: :paramtype bulk_test_id: str :keyword batch_inputs: :paramtype batch_inputs: list[dict[str, any]] :keyword batch_data_input: :paramtype batch_data_input: ~flow.models.BatchDataInput :keyword created_by: :paramtype created_by: ~flow.models.SchemaContractsCreatedBy :keyword created_on: :paramtype created_on: ~datetime.datetime :keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun", "PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun". :paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :paramtype flow_type: str or ~flow.models.FlowType :keyword runtime_name: :paramtype runtime_name: str :keyword aml_compute_name: :paramtype aml_compute_name: str :keyword flow_run_logs: Dictionary of :code:`<string>`. :paramtype flow_run_logs: dict[str, str] :keyword flow_test_mode: Possible values include: "Sync", "Async". :paramtype flow_test_mode: str or ~flow.models.FlowTestMode :keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`. :paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo] :keyword working_directory: :paramtype working_directory: str :keyword flow_dag_file_relative_path: :paramtype flow_dag_file_relative_path: str :keyword flow_snapshot_id: :paramtype flow_snapshot_id: str :keyword variant_run_to_evaluation_runs_id_mapping: Dictionary of <componentsΒ·1k1eaegΒ·schemasΒ·flowrunresultΒ·propertiesΒ·variantruntoevaluationrunsidmappingΒ·additionalproperties>. 
:paramtype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]] """ super(FlowRunResult, self).__init__(**kwargs) self.flow_runs = kwargs.get('flow_runs', None) self.node_runs = kwargs.get('node_runs', None) self.error_response = kwargs.get('error_response', None) self.flow_name = kwargs.get('flow_name', None) self.flow_run_display_name = kwargs.get('flow_run_display_name', None) self.flow_run_id = kwargs.get('flow_run_id', None) self.flow_graph = kwargs.get('flow_graph', None) self.flow_graph_layout = kwargs.get('flow_graph_layout', None) self.flow_run_resource_id = kwargs.get('flow_run_resource_id', None) self.bulk_test_id = kwargs.get('bulk_test_id', None) self.batch_inputs = kwargs.get('batch_inputs', None) self.batch_data_input = kwargs.get('batch_data_input', None) self.created_by = kwargs.get('created_by', None) self.created_on = kwargs.get('created_on', None) self.flow_run_type = kwargs.get('flow_run_type', None) self.flow_type = kwargs.get('flow_type', None) self.runtime_name = kwargs.get('runtime_name', None) self.aml_compute_name = kwargs.get('aml_compute_name', None) self.flow_run_logs = kwargs.get('flow_run_logs', None) self.flow_test_mode = kwargs.get('flow_test_mode', None) self.flow_test_infos = kwargs.get('flow_test_infos', None) self.working_directory = kwargs.get('working_directory', None) self.flow_dag_file_relative_path = kwargs.get('flow_dag_file_relative_path', None) self.flow_snapshot_id = kwargs.get('flow_snapshot_id', None) self.variant_run_to_evaluation_runs_id_mapping = kwargs.get('variant_run_to_evaluation_runs_id_mapping', None) class FlowRunSettings(msrest.serialization.Model): """FlowRunSettings. :ivar flow_run_display_name: :vartype flow_run_display_name: str :ivar description: :vartype description: str :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar properties: This is a dictionary. 
:vartype properties: dict[str, str] :ivar run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest", "Eval", "PairwiseEval". :vartype run_mode: str or ~flow.models.FlowRunMode :ivar batch_inputs: :vartype batch_inputs: list[dict[str, any]] :ivar batch_data_input: :vartype batch_data_input: ~flow.models.BatchDataInput :ivar tuning_node_names: :vartype tuning_node_names: list[str] :ivar tuning_node_settings: This is a dictionary. :vartype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting] :ivar baseline_variant_id: :vartype baseline_variant_id: str :ivar default_variant_id: :vartype default_variant_id: str :ivar variants: This is a dictionary. :vartype variants: dict[str, list[~flow.models.Node]] :ivar variants_tools: :vartype variants_tools: list[~flow.models.Tool] :ivar variants_codes: This is a dictionary. :vartype variants_codes: dict[str, str] :ivar node_name: :vartype node_name: str :ivar bulk_test_id: :vartype bulk_test_id: str :ivar evaluation_flow_run_settings: This is a dictionary. :vartype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings] :ivar inputs_mapping: This is a dictionary. :vartype inputs_mapping: dict[str, str] :ivar data_inputs: This is a dictionary. 
:vartype data_inputs: dict[str, str] :ivar bulk_test_flow_id: :vartype bulk_test_flow_id: str :ivar bulk_test_flow_run_ids: :vartype bulk_test_flow_run_ids: list[str] :ivar aml_compute_name: :vartype aml_compute_name: str :ivar runtime_name: :vartype runtime_name: str :ivar flow_run_output_directory: :vartype flow_run_output_directory: str """ _attribute_map = { 'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'run_mode': {'key': 'runMode', 'type': 'str'}, 'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'}, 'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'}, 'tuning_node_names': {'key': 'tuningNodeNames', 'type': '[str]'}, 'tuning_node_settings': {'key': 'tuningNodeSettings', 'type': '{TuningNodeSetting}'}, 'baseline_variant_id': {'key': 'baselineVariantId', 'type': 'str'}, 'default_variant_id': {'key': 'defaultVariantId', 'type': 'str'}, 'variants': {'key': 'variants', 'type': '{[Node]}'}, 'variants_tools': {'key': 'variantsTools', 'type': '[Tool]'}, 'variants_codes': {'key': 'variantsCodes', 'type': '{str}'}, 'node_name': {'key': 'nodeName', 'type': 'str'}, 'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'}, 'evaluation_flow_run_settings': {'key': 'evaluationFlowRunSettings', 'type': '{EvaluationFlowRunSettings}'}, 'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'}, 'data_inputs': {'key': 'dataInputs', 'type': '{str}'}, 'bulk_test_flow_id': {'key': 'bulkTestFlowId', 'type': 'str'}, 'bulk_test_flow_run_ids': {'key': 'bulkTestFlowRunIds', 'type': '[str]'}, 'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'}, 'runtime_name': {'key': 'runtimeName', 'type': 'str'}, 'flow_run_output_directory': {'key': 'flowRunOutputDirectory', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword flow_run_display_name: :paramtype flow_run_display_name: str 
:keyword description: :paramtype description: str :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] :keyword run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest", "Eval", "PairwiseEval". :paramtype run_mode: str or ~flow.models.FlowRunMode :keyword batch_inputs: :paramtype batch_inputs: list[dict[str, any]] :keyword batch_data_input: :paramtype batch_data_input: ~flow.models.BatchDataInput :keyword tuning_node_names: :paramtype tuning_node_names: list[str] :keyword tuning_node_settings: This is a dictionary. :paramtype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting] :keyword baseline_variant_id: :paramtype baseline_variant_id: str :keyword default_variant_id: :paramtype default_variant_id: str :keyword variants: This is a dictionary. :paramtype variants: dict[str, list[~flow.models.Node]] :keyword variants_tools: :paramtype variants_tools: list[~flow.models.Tool] :keyword variants_codes: This is a dictionary. :paramtype variants_codes: dict[str, str] :keyword node_name: :paramtype node_name: str :keyword bulk_test_id: :paramtype bulk_test_id: str :keyword evaluation_flow_run_settings: This is a dictionary. :paramtype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings] :keyword inputs_mapping: This is a dictionary. :paramtype inputs_mapping: dict[str, str] :keyword data_inputs: This is a dictionary. 
:paramtype data_inputs: dict[str, str] :keyword bulk_test_flow_id: :paramtype bulk_test_flow_id: str :keyword bulk_test_flow_run_ids: :paramtype bulk_test_flow_run_ids: list[str] :keyword aml_compute_name: :paramtype aml_compute_name: str :keyword runtime_name: :paramtype runtime_name: str :keyword flow_run_output_directory: :paramtype flow_run_output_directory: str """ super(FlowRunSettings, self).__init__(**kwargs) self.flow_run_display_name = kwargs.get('flow_run_display_name', None) self.description = kwargs.get('description', None) self.tags = kwargs.get('tags', None) self.properties = kwargs.get('properties', None) self.run_mode = kwargs.get('run_mode', None) self.batch_inputs = kwargs.get('batch_inputs', None) self.batch_data_input = kwargs.get('batch_data_input', None) self.tuning_node_names = kwargs.get('tuning_node_names', None) self.tuning_node_settings = kwargs.get('tuning_node_settings', None) self.baseline_variant_id = kwargs.get('baseline_variant_id', None) self.default_variant_id = kwargs.get('default_variant_id', None) self.variants = kwargs.get('variants', None) self.variants_tools = kwargs.get('variants_tools', None) self.variants_codes = kwargs.get('variants_codes', None) self.node_name = kwargs.get('node_name', None) self.bulk_test_id = kwargs.get('bulk_test_id', None) self.evaluation_flow_run_settings = kwargs.get('evaluation_flow_run_settings', None) self.inputs_mapping = kwargs.get('inputs_mapping', None) self.data_inputs = kwargs.get('data_inputs', None) self.bulk_test_flow_id = kwargs.get('bulk_test_flow_id', None) self.bulk_test_flow_run_ids = kwargs.get('bulk_test_flow_run_ids', None) self.aml_compute_name = kwargs.get('aml_compute_name', None) self.runtime_name = kwargs.get('runtime_name', None) self.flow_run_output_directory = kwargs.get('flow_run_output_directory', None) class FlowRuntimeCapability(msrest.serialization.Model): """FlowRuntimeCapability. 
:ivar flow_features: :vartype flow_features: list[~flow.models.FlowFeature] """ _attribute_map = { 'flow_features': {'key': 'flowFeatures', 'type': '[FlowFeature]'}, } def __init__( self, **kwargs ): """ :keyword flow_features: :paramtype flow_features: list[~flow.models.FlowFeature] """ super(FlowRuntimeCapability, self).__init__(**kwargs) self.flow_features = kwargs.get('flow_features', None) class FlowRuntimeDto(msrest.serialization.Model): """FlowRuntimeDto. :ivar runtime_name: :vartype runtime_name: str :ivar runtime_description: :vartype runtime_description: str :ivar runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance", "TrainingSession". :vartype runtime_type: str or ~flow.models.RuntimeType :ivar environment: :vartype environment: str :ivar status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting", "Stopping". :vartype status: str or ~flow.models.RuntimeStatusEnum :ivar status_message: :vartype status_message: str :ivar error: The error response. 
:vartype error: ~flow.models.ErrorResponse :ivar from_existing_endpoint: :vartype from_existing_endpoint: bool :ivar endpoint_name: :vartype endpoint_name: str :ivar from_existing_deployment: :vartype from_existing_deployment: bool :ivar deployment_name: :vartype deployment_name: str :ivar identity: :vartype identity: ~flow.models.ManagedServiceIdentity :ivar instance_type: :vartype instance_type: str :ivar instance_count: :vartype instance_count: int :ivar compute_instance_name: :vartype compute_instance_name: str :ivar docker_image: :vartype docker_image: str :ivar published_port: :vartype published_port: int :ivar target_port: :vartype target_port: int :ivar from_existing_custom_app: :vartype from_existing_custom_app: bool :ivar custom_app_name: :vartype custom_app_name: str :ivar assigned_to: :vartype assigned_to: ~flow.models.AssignedUser :ivar endpoint_url: :vartype endpoint_url: str :ivar created_on: :vartype created_on: ~datetime.datetime :ivar modified_on: :vartype modified_on: ~datetime.datetime :ivar owner: :vartype owner: ~flow.models.SchemaContractsCreatedBy """ _attribute_map = { 'runtime_name': {'key': 'runtimeName', 'type': 'str'}, 'runtime_description': {'key': 'runtimeDescription', 'type': 'str'}, 'runtime_type': {'key': 'runtimeType', 'type': 'str'}, 'environment': {'key': 'environment', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'status_message': {'key': 'statusMessage', 'type': 'str'}, 'error': {'key': 'error', 'type': 'ErrorResponse'}, 'from_existing_endpoint': {'key': 'fromExistingEndpoint', 'type': 'bool'}, 'endpoint_name': {'key': 'endpointName', 'type': 'str'}, 'from_existing_deployment': {'key': 'fromExistingDeployment', 'type': 'bool'}, 'deployment_name': {'key': 'deploymentName', 'type': 'str'}, 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, 'instance_type': {'key': 'instanceType', 'type': 'str'}, 'instance_count': {'key': 'instanceCount', 'type': 'int'}, 'compute_instance_name': {'key': 
'computeInstanceName', 'type': 'str'}, 'docker_image': {'key': 'dockerImage', 'type': 'str'}, 'published_port': {'key': 'publishedPort', 'type': 'int'}, 'target_port': {'key': 'targetPort', 'type': 'int'}, 'from_existing_custom_app': {'key': 'fromExistingCustomApp', 'type': 'bool'}, 'custom_app_name': {'key': 'customAppName', 'type': 'str'}, 'assigned_to': {'key': 'assignedTo', 'type': 'AssignedUser'}, 'endpoint_url': {'key': 'endpointUrl', 'type': 'str'}, 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'}, } def __init__( self, **kwargs ): """ :keyword runtime_name: :paramtype runtime_name: str :keyword runtime_description: :paramtype runtime_description: str :keyword runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance", "TrainingSession". :paramtype runtime_type: str or ~flow.models.RuntimeType :keyword environment: :paramtype environment: str :keyword status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting", "Stopping". :paramtype status: str or ~flow.models.RuntimeStatusEnum :keyword status_message: :paramtype status_message: str :keyword error: The error response. 
:paramtype error: ~flow.models.ErrorResponse :keyword from_existing_endpoint: :paramtype from_existing_endpoint: bool :keyword endpoint_name: :paramtype endpoint_name: str :keyword from_existing_deployment: :paramtype from_existing_deployment: bool :keyword deployment_name: :paramtype deployment_name: str :keyword identity: :paramtype identity: ~flow.models.ManagedServiceIdentity :keyword instance_type: :paramtype instance_type: str :keyword instance_count: :paramtype instance_count: int :keyword compute_instance_name: :paramtype compute_instance_name: str :keyword docker_image: :paramtype docker_image: str :keyword published_port: :paramtype published_port: int :keyword target_port: :paramtype target_port: int :keyword from_existing_custom_app: :paramtype from_existing_custom_app: bool :keyword custom_app_name: :paramtype custom_app_name: str :keyword assigned_to: :paramtype assigned_to: ~flow.models.AssignedUser :keyword endpoint_url: :paramtype endpoint_url: str :keyword created_on: :paramtype created_on: ~datetime.datetime :keyword modified_on: :paramtype modified_on: ~datetime.datetime :keyword owner: :paramtype owner: ~flow.models.SchemaContractsCreatedBy """ super(FlowRuntimeDto, self).__init__(**kwargs) self.runtime_name = kwargs.get('runtime_name', None) self.runtime_description = kwargs.get('runtime_description', None) self.runtime_type = kwargs.get('runtime_type', None) self.environment = kwargs.get('environment', None) self.status = kwargs.get('status', None) self.status_message = kwargs.get('status_message', None) self.error = kwargs.get('error', None) self.from_existing_endpoint = kwargs.get('from_existing_endpoint', None) self.endpoint_name = kwargs.get('endpoint_name', None) self.from_existing_deployment = kwargs.get('from_existing_deployment', None) self.deployment_name = kwargs.get('deployment_name', None) self.identity = kwargs.get('identity', None) self.instance_type = kwargs.get('instance_type', None) self.instance_count = 
kwargs.get('instance_count', None) self.compute_instance_name = kwargs.get('compute_instance_name', None) self.docker_image = kwargs.get('docker_image', None) self.published_port = kwargs.get('published_port', None) self.target_port = kwargs.get('target_port', None) self.from_existing_custom_app = kwargs.get('from_existing_custom_app', None) self.custom_app_name = kwargs.get('custom_app_name', None) self.assigned_to = kwargs.get('assigned_to', None) self.endpoint_url = kwargs.get('endpoint_url', None) self.created_on = kwargs.get('created_on', None) self.modified_on = kwargs.get('modified_on', None) self.owner = kwargs.get('owner', None) class FlowSampleDto(msrest.serialization.Model): """FlowSampleDto. :ivar sample_resource_id: :vartype sample_resource_id: str :ivar section: Possible values include: "Gallery", "Template". :vartype section: str or ~flow.models.Section :ivar index_number: :vartype index_number: int :ivar flow_name: :vartype flow_name: str :ivar description: :vartype description: str :ivar details: :vartype details: str :ivar tags: A set of tags. Dictionary of :code:`<string>`. :vartype tags: dict[str, str] :ivar flow: :vartype flow: ~flow.models.Flow :ivar flow_definition_file_path: :vartype flow_definition_file_path: str :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". 
:vartype flow_type: str or ~flow.models.FlowType :ivar flow_run_settings: :vartype flow_run_settings: ~flow.models.FlowRunSettings :ivar is_archived: :vartype is_archived: bool :ivar vm_size: :vartype vm_size: str :ivar max_idle_time_seconds: :vartype max_idle_time_seconds: long :ivar identity: :vartype identity: str """ _attribute_map = { 'sample_resource_id': {'key': 'sampleResourceId', 'type': 'str'}, 'section': {'key': 'section', 'type': 'str'}, 'index_number': {'key': 'indexNumber', 'type': 'int'}, 'flow_name': {'key': 'flowName', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'details': {'key': 'details', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'flow': {'key': 'flow', 'type': 'Flow'}, 'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'}, 'flow_type': {'key': 'flowType', 'type': 'str'}, 'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'}, 'is_archived': {'key': 'isArchived', 'type': 'bool'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'}, 'identity': {'key': 'identity', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword sample_resource_id: :paramtype sample_resource_id: str :keyword section: Possible values include: "Gallery", "Template". :paramtype section: str or ~flow.models.Section :keyword index_number: :paramtype index_number: int :keyword flow_name: :paramtype flow_name: str :keyword description: :paramtype description: str :keyword details: :paramtype details: str :keyword tags: A set of tags. Dictionary of :code:`<string>`. :paramtype tags: dict[str, str] :keyword flow: :paramtype flow: ~flow.models.Flow :keyword flow_definition_file_path: :paramtype flow_definition_file_path: str :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". 
:paramtype flow_type: str or ~flow.models.FlowType :keyword flow_run_settings: :paramtype flow_run_settings: ~flow.models.FlowRunSettings :keyword is_archived: :paramtype is_archived: bool :keyword vm_size: :paramtype vm_size: str :keyword max_idle_time_seconds: :paramtype max_idle_time_seconds: long :keyword identity: :paramtype identity: str """ super(FlowSampleDto, self).__init__(**kwargs) self.sample_resource_id = kwargs.get('sample_resource_id', None) self.section = kwargs.get('section', None) self.index_number = kwargs.get('index_number', None) self.flow_name = kwargs.get('flow_name', None) self.description = kwargs.get('description', None) self.details = kwargs.get('details', None) self.tags = kwargs.get('tags', None) self.flow = kwargs.get('flow', None) self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None) self.flow_type = kwargs.get('flow_type', None) self.flow_run_settings = kwargs.get('flow_run_settings', None) self.is_archived = kwargs.get('is_archived', None) self.vm_size = kwargs.get('vm_size', None) self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None) self.identity = kwargs.get('identity', None) class FlowSessionDto(msrest.serialization.Model): """FlowSessionDto. :ivar session_id: :vartype session_id: str :ivar base_image: :vartype base_image: str :ivar packages: :vartype packages: list[str] :ivar vm_size: :vartype vm_size: str :ivar max_idle_time_seconds: :vartype max_idle_time_seconds: long :ivar flow_features: :vartype flow_features: list[~flow.models.FlowFeature] :ivar runtime_name: :vartype runtime_name: str :ivar runtime_description: :vartype runtime_description: str :ivar runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance", "TrainingSession". :vartype runtime_type: str or ~flow.models.RuntimeType :ivar environment: :vartype environment: str :ivar status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting", "Stopping". 
:vartype status: str or ~flow.models.RuntimeStatusEnum :ivar status_message: :vartype status_message: str :ivar error: The error response. :vartype error: ~flow.models.ErrorResponse :ivar from_existing_endpoint: :vartype from_existing_endpoint: bool :ivar endpoint_name: :vartype endpoint_name: str :ivar from_existing_deployment: :vartype from_existing_deployment: bool :ivar deployment_name: :vartype deployment_name: str :ivar identity: :vartype identity: ~flow.models.ManagedServiceIdentity :ivar instance_type: :vartype instance_type: str :ivar instance_count: :vartype instance_count: int :ivar compute_instance_name: :vartype compute_instance_name: str :ivar docker_image: :vartype docker_image: str :ivar published_port: :vartype published_port: int :ivar target_port: :vartype target_port: int :ivar from_existing_custom_app: :vartype from_existing_custom_app: bool :ivar custom_app_name: :vartype custom_app_name: str :ivar assigned_to: :vartype assigned_to: ~flow.models.AssignedUser :ivar endpoint_url: :vartype endpoint_url: str :ivar created_on: :vartype created_on: ~datetime.datetime :ivar modified_on: :vartype modified_on: ~datetime.datetime :ivar owner: :vartype owner: ~flow.models.SchemaContractsCreatedBy """ _attribute_map = { 'session_id': {'key': 'sessionId', 'type': 'str'}, 'base_image': {'key': 'baseImage', 'type': 'str'}, 'packages': {'key': 'packages', 'type': '[str]'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'}, 'flow_features': {'key': 'flowFeatures', 'type': '[FlowFeature]'}, 'runtime_name': {'key': 'runtimeName', 'type': 'str'}, 'runtime_description': {'key': 'runtimeDescription', 'type': 'str'}, 'runtime_type': {'key': 'runtimeType', 'type': 'str'}, 'environment': {'key': 'environment', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'status_message': {'key': 'statusMessage', 'type': 'str'}, 'error': {'key': 'error', 'type': 'ErrorResponse'}, 'from_existing_endpoint': 
{'key': 'fromExistingEndpoint', 'type': 'bool'}, 'endpoint_name': {'key': 'endpointName', 'type': 'str'}, 'from_existing_deployment': {'key': 'fromExistingDeployment', 'type': 'bool'}, 'deployment_name': {'key': 'deploymentName', 'type': 'str'}, 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, 'instance_type': {'key': 'instanceType', 'type': 'str'}, 'instance_count': {'key': 'instanceCount', 'type': 'int'}, 'compute_instance_name': {'key': 'computeInstanceName', 'type': 'str'}, 'docker_image': {'key': 'dockerImage', 'type': 'str'}, 'published_port': {'key': 'publishedPort', 'type': 'int'}, 'target_port': {'key': 'targetPort', 'type': 'int'}, 'from_existing_custom_app': {'key': 'fromExistingCustomApp', 'type': 'bool'}, 'custom_app_name': {'key': 'customAppName', 'type': 'str'}, 'assigned_to': {'key': 'assignedTo', 'type': 'AssignedUser'}, 'endpoint_url': {'key': 'endpointUrl', 'type': 'str'}, 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'}, } def __init__( self, **kwargs ): """ :keyword session_id: :paramtype session_id: str :keyword base_image: :paramtype base_image: str :keyword packages: :paramtype packages: list[str] :keyword vm_size: :paramtype vm_size: str :keyword max_idle_time_seconds: :paramtype max_idle_time_seconds: long :keyword flow_features: :paramtype flow_features: list[~flow.models.FlowFeature] :keyword runtime_name: :paramtype runtime_name: str :keyword runtime_description: :paramtype runtime_description: str :keyword runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance", "TrainingSession". :paramtype runtime_type: str or ~flow.models.RuntimeType :keyword environment: :paramtype environment: str :keyword status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting", "Stopping". 
:paramtype status: str or ~flow.models.RuntimeStatusEnum :keyword status_message: :paramtype status_message: str :keyword error: The error response. :paramtype error: ~flow.models.ErrorResponse :keyword from_existing_endpoint: :paramtype from_existing_endpoint: bool :keyword endpoint_name: :paramtype endpoint_name: str :keyword from_existing_deployment: :paramtype from_existing_deployment: bool :keyword deployment_name: :paramtype deployment_name: str :keyword identity: :paramtype identity: ~flow.models.ManagedServiceIdentity :keyword instance_type: :paramtype instance_type: str :keyword instance_count: :paramtype instance_count: int :keyword compute_instance_name: :paramtype compute_instance_name: str :keyword docker_image: :paramtype docker_image: str :keyword published_port: :paramtype published_port: int :keyword target_port: :paramtype target_port: int :keyword from_existing_custom_app: :paramtype from_existing_custom_app: bool :keyword custom_app_name: :paramtype custom_app_name: str :keyword assigned_to: :paramtype assigned_to: ~flow.models.AssignedUser :keyword endpoint_url: :paramtype endpoint_url: str :keyword created_on: :paramtype created_on: ~datetime.datetime :keyword modified_on: :paramtype modified_on: ~datetime.datetime :keyword owner: :paramtype owner: ~flow.models.SchemaContractsCreatedBy """ super(FlowSessionDto, self).__init__(**kwargs) self.session_id = kwargs.get('session_id', None) self.base_image = kwargs.get('base_image', None) self.packages = kwargs.get('packages', None) self.vm_size = kwargs.get('vm_size', None) self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None) self.flow_features = kwargs.get('flow_features', None) self.runtime_name = kwargs.get('runtime_name', None) self.runtime_description = kwargs.get('runtime_description', None) self.runtime_type = kwargs.get('runtime_type', None) self.environment = kwargs.get('environment', None) self.status = kwargs.get('status', None) self.status_message = 
kwargs.get('status_message', None) self.error = kwargs.get('error', None) self.from_existing_endpoint = kwargs.get('from_existing_endpoint', None) self.endpoint_name = kwargs.get('endpoint_name', None) self.from_existing_deployment = kwargs.get('from_existing_deployment', None) self.deployment_name = kwargs.get('deployment_name', None) self.identity = kwargs.get('identity', None) self.instance_type = kwargs.get('instance_type', None) self.instance_count = kwargs.get('instance_count', None) self.compute_instance_name = kwargs.get('compute_instance_name', None) self.docker_image = kwargs.get('docker_image', None) self.published_port = kwargs.get('published_port', None) self.target_port = kwargs.get('target_port', None) self.from_existing_custom_app = kwargs.get('from_existing_custom_app', None) self.custom_app_name = kwargs.get('custom_app_name', None) self.assigned_to = kwargs.get('assigned_to', None) self.endpoint_url = kwargs.get('endpoint_url', None) self.created_on = kwargs.get('created_on', None) self.modified_on = kwargs.get('modified_on', None) self.owner = kwargs.get('owner', None) class FlowSnapshot(msrest.serialization.Model): """FlowSnapshot. :ivar inputs: This is a dictionary. :vartype inputs: dict[str, ~flow.models.FlowInputDefinition] :ivar outputs: This is a dictionary. :vartype outputs: dict[str, ~flow.models.FlowOutputDefinition] :ivar nodes: :vartype nodes: list[~flow.models.FlowNode] :ivar node_variants: This is a dictionary. :vartype node_variants: dict[str, ~flow.models.FlowNodeVariant] :ivar environment: :vartype environment: ~flow.models.FlowEnvironment :ivar environment_variables: This is a dictionary. :vartype environment_variables: dict[str, any] :ivar language: Possible values include: "Python", "CSharp". 
:vartype language: str or ~flow.models.FlowLanguage """ _attribute_map = { 'inputs': {'key': 'inputs', 'type': '{FlowInputDefinition}'}, 'outputs': {'key': 'outputs', 'type': '{FlowOutputDefinition}'}, 'nodes': {'key': 'nodes', 'type': '[FlowNode]'}, 'node_variants': {'key': 'node_variants', 'type': '{FlowNodeVariant}'}, 'environment': {'key': 'environment', 'type': 'FlowEnvironment'}, 'environment_variables': {'key': 'environment_variables', 'type': '{object}'}, 'language': {'key': 'language', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword inputs: This is a dictionary. :paramtype inputs: dict[str, ~flow.models.FlowInputDefinition] :keyword outputs: This is a dictionary. :paramtype outputs: dict[str, ~flow.models.FlowOutputDefinition] :keyword nodes: :paramtype nodes: list[~flow.models.FlowNode] :keyword node_variants: This is a dictionary. :paramtype node_variants: dict[str, ~flow.models.FlowNodeVariant] :keyword environment: :paramtype environment: ~flow.models.FlowEnvironment :keyword environment_variables: This is a dictionary. :paramtype environment_variables: dict[str, any] :keyword language: Possible values include: "Python", "CSharp". :paramtype language: str or ~flow.models.FlowLanguage """ super(FlowSnapshot, self).__init__(**kwargs) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.nodes = kwargs.get('nodes', None) self.node_variants = kwargs.get('node_variants', None) self.environment = kwargs.get('environment', None) self.environment_variables = kwargs.get('environment_variables', None) self.language = kwargs.get('language', None) class FlowSubmitRunSettings(msrest.serialization.Model): """FlowSubmitRunSettings. :ivar node_inputs: This is a dictionary. :vartype node_inputs: dict[str, any] :ivar flow_run_display_name: :vartype flow_run_display_name: str :ivar description: :vartype description: str :ivar tags: A set of tags. This is a dictionary. 
:vartype tags: dict[str, str] :ivar properties: This is a dictionary. :vartype properties: dict[str, str] :ivar run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest", "Eval", "PairwiseEval". :vartype run_mode: str or ~flow.models.FlowRunMode :ivar batch_inputs: :vartype batch_inputs: list[dict[str, any]] :ivar batch_data_input: :vartype batch_data_input: ~flow.models.BatchDataInput :ivar tuning_node_names: :vartype tuning_node_names: list[str] :ivar tuning_node_settings: This is a dictionary. :vartype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting] :ivar baseline_variant_id: :vartype baseline_variant_id: str :ivar default_variant_id: :vartype default_variant_id: str :ivar variants: This is a dictionary. :vartype variants: dict[str, list[~flow.models.Node]] :ivar variants_tools: :vartype variants_tools: list[~flow.models.Tool] :ivar variants_codes: This is a dictionary. :vartype variants_codes: dict[str, str] :ivar node_name: :vartype node_name: str :ivar bulk_test_id: :vartype bulk_test_id: str :ivar evaluation_flow_run_settings: This is a dictionary. :vartype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings] :ivar inputs_mapping: This is a dictionary. :vartype inputs_mapping: dict[str, str] :ivar data_inputs: This is a dictionary. 
:vartype data_inputs: dict[str, str] :ivar bulk_test_flow_id: :vartype bulk_test_flow_id: str :ivar bulk_test_flow_run_ids: :vartype bulk_test_flow_run_ids: list[str] :ivar aml_compute_name: :vartype aml_compute_name: str :ivar runtime_name: :vartype runtime_name: str :ivar flow_run_output_directory: :vartype flow_run_output_directory: str """ _attribute_map = { 'node_inputs': {'key': 'nodeInputs', 'type': '{object}'}, 'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'run_mode': {'key': 'runMode', 'type': 'str'}, 'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'}, 'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'}, 'tuning_node_names': {'key': 'tuningNodeNames', 'type': '[str]'}, 'tuning_node_settings': {'key': 'tuningNodeSettings', 'type': '{TuningNodeSetting}'}, 'baseline_variant_id': {'key': 'baselineVariantId', 'type': 'str'}, 'default_variant_id': {'key': 'defaultVariantId', 'type': 'str'}, 'variants': {'key': 'variants', 'type': '{[Node]}'}, 'variants_tools': {'key': 'variantsTools', 'type': '[Tool]'}, 'variants_codes': {'key': 'variantsCodes', 'type': '{str}'}, 'node_name': {'key': 'nodeName', 'type': 'str'}, 'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'}, 'evaluation_flow_run_settings': {'key': 'evaluationFlowRunSettings', 'type': '{EvaluationFlowRunSettings}'}, 'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'}, 'data_inputs': {'key': 'dataInputs', 'type': '{str}'}, 'bulk_test_flow_id': {'key': 'bulkTestFlowId', 'type': 'str'}, 'bulk_test_flow_run_ids': {'key': 'bulkTestFlowRunIds', 'type': '[str]'}, 'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'}, 'runtime_name': {'key': 'runtimeName', 'type': 'str'}, 'flow_run_output_directory': {'key': 'flowRunOutputDirectory', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword 
node_inputs: This is a dictionary. :paramtype node_inputs: dict[str, any] :keyword flow_run_display_name: :paramtype flow_run_display_name: str :keyword description: :paramtype description: str :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] :keyword run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest", "Eval", "PairwiseEval". :paramtype run_mode: str or ~flow.models.FlowRunMode :keyword batch_inputs: :paramtype batch_inputs: list[dict[str, any]] :keyword batch_data_input: :paramtype batch_data_input: ~flow.models.BatchDataInput :keyword tuning_node_names: :paramtype tuning_node_names: list[str] :keyword tuning_node_settings: This is a dictionary. :paramtype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting] :keyword baseline_variant_id: :paramtype baseline_variant_id: str :keyword default_variant_id: :paramtype default_variant_id: str :keyword variants: This is a dictionary. :paramtype variants: dict[str, list[~flow.models.Node]] :keyword variants_tools: :paramtype variants_tools: list[~flow.models.Tool] :keyword variants_codes: This is a dictionary. :paramtype variants_codes: dict[str, str] :keyword node_name: :paramtype node_name: str :keyword bulk_test_id: :paramtype bulk_test_id: str :keyword evaluation_flow_run_settings: This is a dictionary. :paramtype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings] :keyword inputs_mapping: This is a dictionary. :paramtype inputs_mapping: dict[str, str] :keyword data_inputs: This is a dictionary. 
        :paramtype data_inputs: dict[str, str]
        :keyword bulk_test_flow_id:
        :paramtype bulk_test_flow_id: str
        :keyword bulk_test_flow_run_ids:
        :paramtype bulk_test_flow_run_ids: list[str]
        :keyword aml_compute_name:
        :paramtype aml_compute_name: str
        :keyword runtime_name:
        :paramtype runtime_name: str
        :keyword flow_run_output_directory:
        :paramtype flow_run_output_directory: str
        """
        super(FlowSubmitRunSettings, self).__init__(**kwargs)
        # All settings are optional keyword arguments; anything not supplied
        # defaults to None (generated msrest model convention).
        self.node_inputs = kwargs.get('node_inputs', None)
        self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)
        self.run_mode = kwargs.get('run_mode', None)
        self.batch_inputs = kwargs.get('batch_inputs', None)
        self.batch_data_input = kwargs.get('batch_data_input', None)
        self.tuning_node_names = kwargs.get('tuning_node_names', None)
        self.tuning_node_settings = kwargs.get('tuning_node_settings', None)
        self.baseline_variant_id = kwargs.get('baseline_variant_id', None)
        self.default_variant_id = kwargs.get('default_variant_id', None)
        self.variants = kwargs.get('variants', None)
        self.variants_tools = kwargs.get('variants_tools', None)
        self.variants_codes = kwargs.get('variants_codes', None)
        self.node_name = kwargs.get('node_name', None)
        self.bulk_test_id = kwargs.get('bulk_test_id', None)
        self.evaluation_flow_run_settings = kwargs.get('evaluation_flow_run_settings', None)
        self.inputs_mapping = kwargs.get('inputs_mapping', None)
        self.data_inputs = kwargs.get('data_inputs', None)
        self.bulk_test_flow_id = kwargs.get('bulk_test_flow_id', None)
        self.bulk_test_flow_run_ids = kwargs.get('bulk_test_flow_run_ids', None)
        self.aml_compute_name = kwargs.get('aml_compute_name', None)
        self.runtime_name = kwargs.get('runtime_name', None)
        self.flow_run_output_directory = kwargs.get('flow_run_output_directory', None)


class FlowTestInfo(msrest.serialization.Model):
    """FlowTestInfo.

    :ivar flow_run_id:
    :vartype flow_run_id: str
    :ivar flow_test_storage_setting:
    :vartype flow_test_storage_setting: ~flow.models.FlowTestStorageSetting
    """

    # Maps each attribute to its REST wire key and msrest serialization type.
    _attribute_map = {
        'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
        'flow_test_storage_setting': {'key': 'flowTestStorageSetting', 'type': 'FlowTestStorageSetting'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword flow_run_id:
        :paramtype flow_run_id: str
        :keyword flow_test_storage_setting:
        :paramtype flow_test_storage_setting: ~flow.models.FlowTestStorageSetting
        """
        super(FlowTestInfo, self).__init__(**kwargs)
        self.flow_run_id = kwargs.get('flow_run_id', None)
        self.flow_test_storage_setting = kwargs.get('flow_test_storage_setting', None)


class FlowTestStorageSetting(msrest.serialization.Model):
    """FlowTestStorageSetting.

    :ivar storage_account_name:
    :vartype storage_account_name: str
    :ivar blob_container_name:
    :vartype blob_container_name: str
    :ivar flow_artifacts_root_path:
    :vartype flow_artifacts_root_path: str
    :ivar output_datastore_name:
    :vartype output_datastore_name: str
    """

    # Maps each attribute to its REST wire key and msrest serialization type.
    _attribute_map = {
        'storage_account_name': {'key': 'storageAccountName', 'type': 'str'},
        'blob_container_name': {'key': 'blobContainerName', 'type': 'str'},
        'flow_artifacts_root_path': {'key': 'flowArtifactsRootPath', 'type': 'str'},
        'output_datastore_name': {'key': 'outputDatastoreName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword storage_account_name:
        :paramtype storage_account_name: str
        :keyword blob_container_name:
        :paramtype blob_container_name: str
        :keyword flow_artifacts_root_path:
        :paramtype flow_artifacts_root_path: str
        :keyword output_datastore_name:
        :paramtype output_datastore_name: str
        """
        super(FlowTestStorageSetting, self).__init__(**kwargs)
        self.storage_account_name = kwargs.get('storage_account_name', None)
        self.blob_container_name = kwargs.get('blob_container_name', None)
        self.flow_artifacts_root_path = kwargs.get('flow_artifacts_root_path', None)
        self.output_datastore_name = kwargs.get('output_datastore_name', None)


class FlowToolsDto(msrest.serialization.Model):
    """FlowToolsDto.

    :ivar package: This is a dictionary.
    :vartype package: dict[str, ~flow.models.Tool]
    :ivar code: This is a dictionary.
    :vartype code: dict[str, ~flow.models.Tool]
    :ivar errors: This is a dictionary.
    :vartype errors: dict[str, ~flow.models.ErrorResponse]
    """

    # Maps each attribute to its REST wire key and msrest serialization type.
    _attribute_map = {
        'package': {'key': 'package', 'type': '{Tool}'},
        'code': {'key': 'code', 'type': '{Tool}'},
        'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword package: This is a dictionary.
        :paramtype package: dict[str, ~flow.models.Tool]
        :keyword code: This is a dictionary.
        :paramtype code: dict[str, ~flow.models.Tool]
        :keyword errors: This is a dictionary.
        :paramtype errors: dict[str, ~flow.models.ErrorResponse]
        """
        super(FlowToolsDto, self).__init__(**kwargs)
        self.package = kwargs.get('package', None)
        self.code = kwargs.get('code', None)
        self.errors = kwargs.get('errors', None)


class FlowToolSettingParameter(msrest.serialization.Model):
    """FlowToolSettingParameter.
    :ivar type:
    :vartype type: list[str or ~flow.models.ValueType]
    :ivar default:
    :vartype default: str
    :ivar advanced:
    :vartype advanced: bool
    :ivar enum:
    :vartype enum: list[any]
    :ivar model_list:
    :vartype model_list: list[str]
    :ivar text_box_size:
    :vartype text_box_size: int
    :ivar capabilities:
    :vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
    :ivar allow_manual_entry:
    :vartype allow_manual_entry: bool
    """

    # Maps each attribute to its REST wire key and msrest serialization type.
    # NOTE: wire keys here are snake_case (e.g. 'model_list'), unlike most
    # sibling models which use camelCase — keep as generated.
    _attribute_map = {
        'type': {'key': 'type', 'type': '[str]'},
        'default': {'key': 'default', 'type': 'str'},
        'advanced': {'key': 'advanced', 'type': 'bool'},
        'enum': {'key': 'enum', 'type': '[object]'},
        'model_list': {'key': 'model_list', 'type': '[str]'},
        'text_box_size': {'key': 'text_box_size', 'type': 'int'},
        'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
        'allow_manual_entry': {'key': 'allow_manual_entry', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword type:
        :paramtype type: list[str or ~flow.models.ValueType]
        :keyword default:
        :paramtype default: str
        :keyword advanced:
        :paramtype advanced: bool
        :keyword enum:
        :paramtype enum: list[any]
        :keyword model_list:
        :paramtype model_list: list[str]
        :keyword text_box_size:
        :paramtype text_box_size: int
        :keyword capabilities:
        :paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
        :keyword allow_manual_entry:
        :paramtype allow_manual_entry: bool
        """
        super(FlowToolSettingParameter, self).__init__(**kwargs)
        self.type = kwargs.get('type', None)
        self.default = kwargs.get('default', None)
        self.advanced = kwargs.get('advanced', None)
        self.enum = kwargs.get('enum', None)
        self.model_list = kwargs.get('model_list', None)
        self.text_box_size = kwargs.get('text_box_size', None)
        self.capabilities = kwargs.get('capabilities', None)
        self.allow_manual_entry = kwargs.get('allow_manual_entry', None)


class FlowVariantNode(msrest.serialization.Model):
    """FlowVariantNode.

    :ivar node:
    :vartype node: ~flow.models.FlowNode
    :ivar description:
    :vartype description: str
    """

    # Maps each attribute to its REST wire key and msrest serialization type.
    _attribute_map = {
        'node': {'key': 'node', 'type': 'FlowNode'},
        'description': {'key': 'description', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword node:
        :paramtype node: ~flow.models.FlowNode
        :keyword description:
        :paramtype description: str
        """
        super(FlowVariantNode, self).__init__(**kwargs)
        self.node = kwargs.get('node', None)
        self.description = kwargs.get('description', None)


class ForecastHorizon(msrest.serialization.Model):
    """ForecastHorizon.

    :ivar mode: Possible values include: "Auto", "Custom".
    :vartype mode: str or ~flow.models.ForecastHorizonMode
    :ivar value:
    :vartype value: int
    """

    # Maps each attribute to its REST wire key and msrest serialization type.
    _attribute_map = {
        'mode': {'key': 'mode', 'type': 'str'},
        'value': {'key': 'value', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword mode: Possible values include: "Auto", "Custom".
        :paramtype mode: str or ~flow.models.ForecastHorizonMode
        :keyword value:
        :paramtype value: int
        """
        super(ForecastHorizon, self).__init__(**kwargs)
        self.mode = kwargs.get('mode', None)
        self.value = kwargs.get('value', None)


class ForecastingSettings(msrest.serialization.Model):
    """ForecastingSettings.

    :ivar country_or_region_for_holidays:
    :vartype country_or_region_for_holidays: str
    :ivar time_column_name:
    :vartype time_column_name: str
    :ivar target_lags:
    :vartype target_lags: ~flow.models.TargetLags
    :ivar target_rolling_window_size:
    :vartype target_rolling_window_size: ~flow.models.TargetRollingWindowSize
    :ivar forecast_horizon:
    :vartype forecast_horizon: ~flow.models.ForecastHorizon
    :ivar time_series_id_column_names:
    :vartype time_series_id_column_names: list[str]
    :ivar frequency:
    :vartype frequency: str
    :ivar feature_lags:
    :vartype feature_lags: str
    :ivar seasonality:
    :vartype seasonality: ~flow.models.Seasonality
    :ivar short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
    :vartype short_series_handling_config: str or ~flow.models.ShortSeriesHandlingConfiguration
    :ivar use_stl: Possible values include: "Season", "SeasonTrend".
    :vartype use_stl: str or ~flow.models.UseStl
    :ivar target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
    :vartype target_aggregate_function: str or ~flow.models.TargetAggregationFunction
    :ivar cv_step_size:
    :vartype cv_step_size: int
    :ivar features_unknown_at_forecast_time:
    :vartype features_unknown_at_forecast_time: list[str]
    """

    # Maps each attribute to its REST wire key and msrest serialization type.
    _attribute_map = {
        'country_or_region_for_holidays': {'key': 'countryOrRegionForHolidays', 'type': 'str'},
        'time_column_name': {'key': 'timeColumnName', 'type': 'str'},
        'target_lags': {'key': 'targetLags', 'type': 'TargetLags'},
        'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'TargetRollingWindowSize'},
        'forecast_horizon': {'key': 'forecastHorizon', 'type': 'ForecastHorizon'},
        'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'},
        'frequency': {'key': 'frequency', 'type': 'str'},
        'feature_lags': {'key': 'featureLags', 'type': 'str'},
        'seasonality': {'key': 'seasonality', 'type': 'Seasonality'},
        'short_series_handling_config': {'key': 'shortSeriesHandlingConfig', 'type': 'str'},
        'use_stl': {'key': 'useStl', 'type': 'str'},
        'target_aggregate_function': {'key': 'targetAggregateFunction', 'type': 'str'},
        'cv_step_size': {'key': 'cvStepSize', 'type': 'int'},
        'features_unknown_at_forecast_time': {'key': 'featuresUnknownAtForecastTime', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword country_or_region_for_holidays:
        :paramtype country_or_region_for_holidays: str
        :keyword time_column_name:
        :paramtype time_column_name: str
        :keyword target_lags:
        :paramtype target_lags: ~flow.models.TargetLags
        :keyword target_rolling_window_size:
        :paramtype target_rolling_window_size: ~flow.models.TargetRollingWindowSize
        :keyword forecast_horizon:
        :paramtype forecast_horizon: ~flow.models.ForecastHorizon
        :keyword time_series_id_column_names:
        :paramtype time_series_id_column_names: list[str]
        :keyword frequency:
        :paramtype frequency: str
        :keyword feature_lags:
        :paramtype feature_lags: str
        :keyword seasonality:
        :paramtype seasonality: ~flow.models.Seasonality
        :keyword short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
        :paramtype short_series_handling_config: str or
         ~flow.models.ShortSeriesHandlingConfiguration
        :keyword use_stl: Possible values include: "Season", "SeasonTrend".
        :paramtype use_stl: str or ~flow.models.UseStl
        :keyword target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
        :paramtype target_aggregate_function: str or ~flow.models.TargetAggregationFunction
        :keyword cv_step_size:
        :paramtype cv_step_size: int
        :keyword features_unknown_at_forecast_time:
        :paramtype features_unknown_at_forecast_time: list[str]
        """
        super(ForecastingSettings, self).__init__(**kwargs)
        self.country_or_region_for_holidays = kwargs.get('country_or_region_for_holidays', None)
        self.time_column_name = kwargs.get('time_column_name', None)
        self.target_lags = kwargs.get('target_lags', None)
        self.target_rolling_window_size = kwargs.get('target_rolling_window_size', None)
        self.forecast_horizon = kwargs.get('forecast_horizon', None)
        self.time_series_id_column_names = kwargs.get('time_series_id_column_names', None)
        self.frequency = kwargs.get('frequency', None)
        self.feature_lags = kwargs.get('feature_lags', None)
        self.seasonality = kwargs.get('seasonality', None)
        self.short_series_handling_config = kwargs.get('short_series_handling_config', None)
        self.use_stl = kwargs.get('use_stl', None)
        self.target_aggregate_function = kwargs.get('target_aggregate_function', None)
        self.cv_step_size = kwargs.get('cv_step_size', None)
        self.features_unknown_at_forecast_time = kwargs.get('features_unknown_at_forecast_time', None)


class GeneralSettings(msrest.serialization.Model):
    """GeneralSettings.
    :ivar primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall",
     "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation",
     "NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError",
     "NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou".
    :vartype primary_metric: str or ~flow.models.PrimaryMetrics
    :ivar task_type: Possible values include: "Classification", "Regression", "Forecasting",
     "ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
     "ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
     "TextClassificationMultilabel".
    :vartype task_type: str or ~flow.models.TaskType
    :ivar log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error",
     "Critical".
    :vartype log_verbosity: str or ~flow.models.LogVerbosity
    """

    # Maps each attribute to its REST wire key and msrest serialization type.
    _attribute_map = {
        'primary_metric': {'key': 'primaryMetric', 'type': 'str'},
        'task_type': {'key': 'taskType', 'type': 'str'},
        'log_verbosity': {'key': 'logVerbosity', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword primary_metric: Possible values include: "AUCWeighted", "Accuracy",
         "NormMacroRecall", "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted",
         "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score",
         "NormalizedMeanAbsoluteError", "NormalizedRootMeanSquaredLogError", "MeanAveragePrecision",
         "Iou".
        :paramtype primary_metric: str or ~flow.models.PrimaryMetrics
        :keyword task_type: Possible values include: "Classification", "Regression", "Forecasting",
         "ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
         "ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
         "TextClassificationMultilabel".
        :paramtype task_type: str or ~flow.models.TaskType
        :keyword log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning",
         "Error", "Critical".
        :paramtype log_verbosity: str or ~flow.models.LogVerbosity
        """
        super(GeneralSettings, self).__init__(**kwargs)
        self.primary_metric = kwargs.get('primary_metric', None)
        self.task_type = kwargs.get('task_type', None)
        self.log_verbosity = kwargs.get('log_verbosity', None)


class GeneratePipelineComponentRequest(msrest.serialization.Model):
    """GeneratePipelineComponentRequest.

    :ivar name:
    :vartype name: str
    :ivar display_name:
    :vartype display_name: str
    :ivar module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
     "Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
    :vartype module_scope: str or ~flow.models.ModuleScope
    :ivar is_deterministic:
    :vartype is_deterministic: bool
    :ivar category:
    :vartype category: str
    :ivar version:
    :vartype version: str
    :ivar set_as_default_version:
    :vartype set_as_default_version: bool
    :ivar registry_name:
    :vartype registry_name: str
    :ivar graph:
    :vartype graph: ~flow.models.GraphDraftEntity
    :ivar pipeline_run_settings:
    :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
    :ivar module_node_run_settings:
    :vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
    :ivar module_node_ui_input_settings:
    :vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar continue_run_on_step_failure:
    :vartype continue_run_on_step_failure: bool
    :ivar description:
    :vartype description: str
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar enforce_rerun:
    :vartype enforce_rerun: bool
    :ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
     "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
     "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
     "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
    :vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
    """

    # Maps each attribute to its REST wire key and msrest serialization type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'module_scope': {'key': 'moduleScope', 'type': 'str'},
        'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
        'category': {'key': 'category', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
        'registry_name': {'key': 'registryName', 'type': 'str'},
        'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
        'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
        'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
        'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
        'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
         "Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
        :paramtype module_scope: str or ~flow.models.ModuleScope
        :keyword is_deterministic:
        :paramtype is_deterministic: bool
        :keyword category:
        :paramtype category: str
        :keyword version:
        :paramtype version: str
        :keyword set_as_default_version:
        :paramtype set_as_default_version: bool
        :keyword registry_name:
        :paramtype registry_name: str
        :keyword graph:
        :paramtype graph: ~flow.models.GraphDraftEntity
        :keyword pipeline_run_settings:
        :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
        :keyword module_node_run_settings:
        :paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
        :keyword module_node_ui_input_settings:
        :paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword continue_run_on_step_failure:
        :paramtype continue_run_on_step_failure: bool
        :keyword description:
        :paramtype description: str
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword enforce_rerun:
        :paramtype enforce_rerun: bool
        :keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
         "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
         "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
         "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
        :paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
        """
        super(GeneratePipelineComponentRequest, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.display_name = kwargs.get('display_name', None)
        self.module_scope = kwargs.get('module_scope', None)
        self.is_deterministic = kwargs.get('is_deterministic', None)
        self.category = kwargs.get('category', None)
        self.version = kwargs.get('version', None)
        self.set_as_default_version = kwargs.get('set_as_default_version', None)
        self.registry_name = kwargs.get('registry_name', None)
        self.graph = kwargs.get('graph', None)
        self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
        self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
        self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
        self.tags = kwargs.get('tags', None)
        self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
        self.description = kwargs.get('description', None)
        self.properties = kwargs.get('properties', None)
        self.enforce_rerun = kwargs.get('enforce_rerun', None)
        self.dataset_access_modes = kwargs.get('dataset_access_modes', None)


class GenerateToolMetaRequest(msrest.serialization.Model):
    """GenerateToolMetaRequest.

    :ivar tools: This is a dictionary.
    :vartype tools: dict[str, ~flow.models.ToolSourceMeta]
    :ivar working_dir:
    :vartype working_dir: str
    """

    # Maps each attribute to its REST wire key and msrest serialization type.
    # NOTE: 'working_dir' is serialized snake_case on the wire — keep as generated.
    _attribute_map = {
        'tools': {'key': 'tools', 'type': '{ToolSourceMeta}'},
        'working_dir': {'key': 'working_dir', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword tools: This is a dictionary.
        :paramtype tools: dict[str, ~flow.models.ToolSourceMeta]
        :keyword working_dir:
        :paramtype working_dir: str
        """
        super(GenerateToolMetaRequest, self).__init__(**kwargs)
        self.tools = kwargs.get('tools', None)
        self.working_dir = kwargs.get('working_dir', None)


class GetDynamicListRequest(msrest.serialization.Model):
    """GetDynamicListRequest.
    :ivar func_path:
    :vartype func_path: str
    :ivar func_kwargs: This is a dictionary.
    :vartype func_kwargs: dict[str, any]
    """

    # Maps each attribute to its REST wire key and msrest serialization type.
    # NOTE: wire keys here are snake_case — keep as generated.
    _attribute_map = {
        'func_path': {'key': 'func_path', 'type': 'str'},
        'func_kwargs': {'key': 'func_kwargs', 'type': '{object}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword func_path:
        :paramtype func_path: str
        :keyword func_kwargs: This is a dictionary.
        :paramtype func_kwargs: dict[str, any]
        """
        super(GetDynamicListRequest, self).__init__(**kwargs)
        self.func_path = kwargs.get('func_path', None)
        self.func_kwargs = kwargs.get('func_kwargs', None)


class GetRunDataResultDto(msrest.serialization.Model):
    """GetRunDataResultDto.

    :ivar run_metadata:
    :vartype run_metadata: ~flow.models.RunDto
    :ivar run_definition: Anything.
    :vartype run_definition: any
    :ivar job_specification: Anything.
    :vartype job_specification: any
    :ivar system_settings: Dictionary of :code:`<string>`.
    :vartype system_settings: dict[str, str]
    """

    # Maps each attribute to its REST wire key and msrest serialization type.
    _attribute_map = {
        'run_metadata': {'key': 'runMetadata', 'type': 'RunDto'},
        'run_definition': {'key': 'runDefinition', 'type': 'object'},
        'job_specification': {'key': 'jobSpecification', 'type': 'object'},
        'system_settings': {'key': 'systemSettings', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword run_metadata:
        :paramtype run_metadata: ~flow.models.RunDto
        :keyword run_definition: Anything.
        :paramtype run_definition: any
        :keyword job_specification: Anything.
        :paramtype job_specification: any
        :keyword system_settings: Dictionary of :code:`<string>`.
        :paramtype system_settings: dict[str, str]
        """
        super(GetRunDataResultDto, self).__init__(**kwargs)
        self.run_metadata = kwargs.get('run_metadata', None)
        self.run_definition = kwargs.get('run_definition', None)
        self.job_specification = kwargs.get('job_specification', None)
        self.system_settings = kwargs.get('system_settings', None)


class GetTrainingSessionDto(msrest.serialization.Model):
    """GetTrainingSessionDto.

    :ivar properties:
    :vartype properties: ~flow.models.SessionProperties
    :ivar compute:
    :vartype compute: ~flow.models.ComputeContract
    """

    # Maps each attribute to its REST wire key and msrest serialization type.
    _attribute_map = {
        'properties': {'key': 'properties', 'type': 'SessionProperties'},
        'compute': {'key': 'compute', 'type': 'ComputeContract'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword properties:
        :paramtype properties: ~flow.models.SessionProperties
        :keyword compute:
        :paramtype compute: ~flow.models.ComputeContract
        """
        super(GetTrainingSessionDto, self).__init__(**kwargs)
        self.properties = kwargs.get('properties', None)
        self.compute = kwargs.get('compute', None)


class GlobalJobDispatcherConfiguration(msrest.serialization.Model):
    """GlobalJobDispatcherConfiguration.

    :ivar vm_size:
    :vartype vm_size: list[str]
    :ivar compute_type: Possible values include: "AmlCompute", "AmlK8s".
    :vartype compute_type: str or ~flow.models.GlobalJobDispatcherSupportedComputeType
    :ivar region:
    :vartype region: list[str]
    :ivar my_resource_only:
    :vartype my_resource_only: bool
    :ivar redispatch_allowed:
    :vartype redispatch_allowed: bool
    :ivar low_priority_vm_tolerant:
    :vartype low_priority_vm_tolerant: bool
    :ivar vc_list:
    :vartype vc_list: list[str]
    :ivar plan_id:
    :vartype plan_id: str
    :ivar plan_region_id:
    :vartype plan_region_id: str
    :ivar vc_block_list:
    :vartype vc_block_list: list[str]
    :ivar cluster_block_list:
    :vartype cluster_block_list: list[str]
    """

    # Maps each attribute to its REST wire key and msrest serialization type.
    _attribute_map = {
        'vm_size': {'key': 'vmSize', 'type': '[str]'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'region': {'key': 'region', 'type': '[str]'},
        'my_resource_only': {'key': 'myResourceOnly', 'type': 'bool'},
        'redispatch_allowed': {'key': 'redispatchAllowed', 'type': 'bool'},
        'low_priority_vm_tolerant': {'key': 'lowPriorityVMTolerant', 'type': 'bool'},
        'vc_list': {'key': 'vcList', 'type': '[str]'},
        'plan_id': {'key': 'planId', 'type': 'str'},
        'plan_region_id': {'key': 'planRegionId', 'type': 'str'},
        'vc_block_list': {'key': 'vcBlockList', 'type': '[str]'},
        'cluster_block_list': {'key': 'clusterBlockList', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword vm_size:
        :paramtype vm_size: list[str]
        :keyword compute_type: Possible values include: "AmlCompute", "AmlK8s".
        :paramtype compute_type: str or ~flow.models.GlobalJobDispatcherSupportedComputeType
        :keyword region:
        :paramtype region: list[str]
        :keyword my_resource_only:
        :paramtype my_resource_only: bool
        :keyword redispatch_allowed:
        :paramtype redispatch_allowed: bool
        :keyword low_priority_vm_tolerant:
        :paramtype low_priority_vm_tolerant: bool
        :keyword vc_list:
        :paramtype vc_list: list[str]
        :keyword plan_id:
        :paramtype plan_id: str
        :keyword plan_region_id:
        :paramtype plan_region_id: str
        :keyword vc_block_list:
        :paramtype vc_block_list: list[str]
        :keyword cluster_block_list:
        :paramtype cluster_block_list: list[str]
        """
        super(GlobalJobDispatcherConfiguration, self).__init__(**kwargs)
        self.vm_size = kwargs.get('vm_size', None)
        self.compute_type = kwargs.get('compute_type', None)
        self.region = kwargs.get('region', None)
        self.my_resource_only = kwargs.get('my_resource_only', None)
        self.redispatch_allowed = kwargs.get('redispatch_allowed', None)
        self.low_priority_vm_tolerant = kwargs.get('low_priority_vm_tolerant', None)
        self.vc_list = kwargs.get('vc_list', None)
        self.plan_id = kwargs.get('plan_id', None)
        self.plan_region_id = kwargs.get('plan_region_id', None)
        self.vc_block_list = kwargs.get('vc_block_list', None)
        self.cluster_block_list = kwargs.get('cluster_block_list', None)


class GlobsOptions(msrest.serialization.Model):
    """GlobsOptions.

    :ivar glob_patterns:
    :vartype glob_patterns: list[str]
    """

    # Maps each attribute to its REST wire key and msrest serialization type.
    _attribute_map = {
        'glob_patterns': {'key': 'globPatterns', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword glob_patterns:
        :paramtype glob_patterns: list[str]
        """
        super(GlobsOptions, self).__init__(**kwargs)
        self.glob_patterns = kwargs.get('glob_patterns', None)


class GraphAnnotationNode(msrest.serialization.Model):
    """GraphAnnotationNode.
    :ivar id:
    :vartype id: str
    :ivar content:
    :vartype content: str
    :ivar mentioned_node_names:
    :vartype mentioned_node_names: list[str]
    :ivar structured_content:
    :vartype structured_content: str
    """

    # Serialization map: Python attribute -> REST wire key and msrest type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'content': {'key': 'content', 'type': 'str'},
        'mentioned_node_names': {'key': 'mentionedNodeNames', 'type': '[str]'},
        'structured_content': {'key': 'structuredContent', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword content:
        :paramtype content: str
        :keyword mentioned_node_names:
        :paramtype mentioned_node_names: list[str]
        :keyword structured_content:
        :paramtype structured_content: str
        """
        super(GraphAnnotationNode, self).__init__(**kwargs)
        self.id = kwargs.get('id', None)
        self.content = kwargs.get('content', None)
        self.mentioned_node_names = kwargs.get('mentioned_node_names', None)
        self.structured_content = kwargs.get('structured_content', None)


class GraphControlNode(msrest.serialization.Model):
    """GraphControlNode.

    :ivar id:
    :vartype id: str
    :ivar control_type: The only acceptable values to pass in are None and "IfElse". The default
     value is None.
    :vartype control_type: str
    :ivar control_parameter:
    :vartype control_parameter: ~flow.models.ParameterAssignment
    :ivar run_attribution:
    :vartype run_attribution: str
    """

    # Serialization map: Python attribute -> REST wire key and msrest type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'control_type': {'key': 'controlType', 'type': 'str'},
        'control_parameter': {'key': 'controlParameter', 'type': 'ParameterAssignment'},
        'run_attribution': {'key': 'runAttribution', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword control_type: The only acceptable values to pass in are None and "IfElse". The
         default value is None.
        :paramtype control_type: str
        :keyword control_parameter:
        :paramtype control_parameter: ~flow.models.ParameterAssignment
        :keyword run_attribution:
        :paramtype run_attribution: str
        """
        super(GraphControlNode, self).__init__(**kwargs)
        self.id = kwargs.get('id', None)
        self.control_type = kwargs.get('control_type', None)
        self.control_parameter = kwargs.get('control_parameter', None)
        self.run_attribution = kwargs.get('run_attribution', None)


class GraphControlReferenceNode(msrest.serialization.Model):
    """GraphControlReferenceNode.

    :ivar id:
    :vartype id: str
    :ivar name:
    :vartype name: str
    :ivar comment:
    :vartype comment: str
    :ivar control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
    :vartype control_flow_type: str or ~flow.models.ControlFlowType
    :ivar reference_node_id:
    :vartype reference_node_id: str
    :ivar do_while_control_flow_info:
    :vartype do_while_control_flow_info: ~flow.models.DoWhileControlFlowInfo
    :ivar parallel_for_control_flow_info:
    :vartype parallel_for_control_flow_info: ~flow.models.ParallelForControlFlowInfo
    :ivar run_attribution:
    :vartype run_attribution: str
    """

    # Serialization map: Python attribute -> REST wire key and msrest type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'comment': {'key': 'comment', 'type': 'str'},
        'control_flow_type': {'key': 'controlFlowType', 'type': 'str'},
        'reference_node_id': {'key': 'referenceNodeId', 'type': 'str'},
        'do_while_control_flow_info': {'key': 'doWhileControlFlowInfo', 'type': 'DoWhileControlFlowInfo'},
        'parallel_for_control_flow_info': {'key': 'parallelForControlFlowInfo', 'type': 'ParallelForControlFlowInfo'},
        'run_attribution': {'key': 'runAttribution', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword name:
        :paramtype name: str
        :keyword comment:
        :paramtype comment: str
        :keyword control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
        :paramtype control_flow_type: str or ~flow.models.ControlFlowType
        :keyword reference_node_id:
        :paramtype reference_node_id: str
        :keyword do_while_control_flow_info:
        :paramtype do_while_control_flow_info: ~flow.models.DoWhileControlFlowInfo
        :keyword parallel_for_control_flow_info:
        :paramtype parallel_for_control_flow_info: ~flow.models.ParallelForControlFlowInfo
        :keyword run_attribution:
        :paramtype run_attribution: str
        """
        super(GraphControlReferenceNode, self).__init__(**kwargs)
        self.id = kwargs.get('id', None)
        self.name = kwargs.get('name', None)
        self.comment = kwargs.get('comment', None)
        self.control_flow_type = kwargs.get('control_flow_type', None)
        self.reference_node_id = kwargs.get('reference_node_id', None)
        self.do_while_control_flow_info = kwargs.get('do_while_control_flow_info', None)
        self.parallel_for_control_flow_info = kwargs.get('parallel_for_control_flow_info', None)
        self.run_attribution = kwargs.get('run_attribution', None)


class GraphDatasetNode(msrest.serialization.Model):
    """GraphDatasetNode.

    :ivar id:
    :vartype id: str
    :ivar dataset_id:
    :vartype dataset_id: str
    :ivar data_path_parameter_name:
    :vartype data_path_parameter_name: str
    :ivar data_set_definition:
    :vartype data_set_definition: ~flow.models.DataSetDefinition
    """

    # Serialization map: Python attribute -> REST wire key and msrest type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'dataset_id': {'key': 'datasetId', 'type': 'str'},
        'data_path_parameter_name': {'key': 'dataPathParameterName', 'type': 'str'},
        'data_set_definition': {'key': 'dataSetDefinition', 'type': 'DataSetDefinition'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword dataset_id:
        :paramtype dataset_id: str
        :keyword data_path_parameter_name:
        :paramtype data_path_parameter_name: str
        :keyword data_set_definition:
        :paramtype data_set_definition: ~flow.models.DataSetDefinition
        """
        super(GraphDatasetNode, self).__init__(**kwargs)
        self.id = kwargs.get('id', None)
        self.dataset_id = kwargs.get('dataset_id', None)
        self.data_path_parameter_name = kwargs.get('data_path_parameter_name', None)
        self.data_set_definition = kwargs.get('data_set_definition', None)


class GraphDraftEntity(msrest.serialization.Model):
    """GraphDraftEntity.

    :ivar module_nodes:
    :vartype module_nodes: list[~flow.models.GraphModuleNode]
    :ivar dataset_nodes:
    :vartype dataset_nodes: list[~flow.models.GraphDatasetNode]
    :ivar sub_graph_nodes:
    :vartype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
    :ivar control_reference_nodes:
    :vartype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
    :ivar control_nodes:
    :vartype control_nodes: list[~flow.models.GraphControlNode]
    :ivar edges:
    :vartype edges: list[~flow.models.GraphEdge]
    :ivar entity_interface:
    :vartype entity_interface: ~flow.models.EntityInterface
    :ivar graph_layout:
    :vartype graph_layout: ~flow.models.GraphLayout
    :ivar created_by:
    :vartype created_by: ~flow.models.CreatedBy
    :ivar last_updated_by:
    :vartype last_updated_by: ~flow.models.CreatedBy
    :ivar default_compute:
    :vartype default_compute: ~flow.models.ComputeSetting
    :ivar default_datastore:
    :vartype default_datastore: ~flow.models.DatastoreSetting
    :ivar default_cloud_priority:
    :vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
    :ivar extended_properties: This is a dictionary.
    :vartype extended_properties: dict[str, str]
    :ivar parent_sub_graph_module_ids:
    :vartype parent_sub_graph_module_ids: list[str]
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Serialization map: Python attribute -> REST wire key and msrest type.
    _attribute_map = {
        'module_nodes': {'key': 'moduleNodes', 'type': '[GraphModuleNode]'},
        'dataset_nodes': {'key': 'datasetNodes', 'type': '[GraphDatasetNode]'},
        'sub_graph_nodes': {'key': 'subGraphNodes', 'type': '[GraphReferenceNode]'},
        'control_reference_nodes': {'key': 'controlReferenceNodes', 'type': '[GraphControlReferenceNode]'},
        'control_nodes': {'key': 'controlNodes', 'type': '[GraphControlNode]'},
        'edges': {'key': 'edges', 'type': '[GraphEdge]'},
        'entity_interface': {'key': 'entityInterface', 'type': 'EntityInterface'},
        'graph_layout': {'key': 'graphLayout', 'type': 'GraphLayout'},
        'created_by': {'key': 'createdBy', 'type': 'CreatedBy'},
        'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'CreatedBy'},
        'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
        'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
        'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
        'extended_properties': {'key': 'extendedProperties', 'type': '{str}'},
        'parent_sub_graph_module_ids': {'key': 'parentSubGraphModuleIds', 'type': '[str]'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword module_nodes:
        :paramtype module_nodes: list[~flow.models.GraphModuleNode]
        :keyword dataset_nodes:
        :paramtype dataset_nodes: list[~flow.models.GraphDatasetNode]
        :keyword sub_graph_nodes:
        :paramtype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
        :keyword control_reference_nodes:
        :paramtype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
        :keyword control_nodes:
        :paramtype control_nodes: list[~flow.models.GraphControlNode]
        :keyword edges:
        :paramtype edges: list[~flow.models.GraphEdge]
        :keyword entity_interface:
        :paramtype entity_interface: ~flow.models.EntityInterface
        :keyword graph_layout:
        :paramtype graph_layout: ~flow.models.GraphLayout
        :keyword created_by:
        :paramtype created_by: ~flow.models.CreatedBy
        :keyword last_updated_by:
        :paramtype last_updated_by: ~flow.models.CreatedBy
        :keyword default_compute:
        :paramtype default_compute: ~flow.models.ComputeSetting
        :keyword default_datastore:
        :paramtype default_datastore: ~flow.models.DatastoreSetting
        :keyword default_cloud_priority:
        :paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
        :keyword extended_properties: This is a dictionary.
        :paramtype extended_properties: dict[str, str]
        :keyword parent_sub_graph_module_ids:
        :paramtype parent_sub_graph_module_ids: list[str]
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(GraphDraftEntity, self).__init__(**kwargs)
        self.module_nodes = kwargs.get('module_nodes', None)
        self.dataset_nodes = kwargs.get('dataset_nodes', None)
        self.sub_graph_nodes = kwargs.get('sub_graph_nodes', None)
        self.control_reference_nodes = kwargs.get('control_reference_nodes', None)
        self.control_nodes = kwargs.get('control_nodes', None)
        self.edges = kwargs.get('edges', None)
        self.entity_interface = kwargs.get('entity_interface', None)
        self.graph_layout = kwargs.get('graph_layout', None)
        self.created_by = kwargs.get('created_by', None)
        self.last_updated_by = kwargs.get('last_updated_by', None)
        self.default_compute = kwargs.get('default_compute', None)
        self.default_datastore = kwargs.get('default_datastore', None)
        self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
        self.extended_properties = kwargs.get('extended_properties', None)
        self.parent_sub_graph_module_ids = kwargs.get('parent_sub_graph_module_ids', None)
        self.id = kwargs.get('id', None)
        self.etag = kwargs.get('etag', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)


class GraphEdge(msrest.serialization.Model):
    """GraphEdge.

    :ivar source_output_port:
    :vartype source_output_port: ~flow.models.PortInfo
    :ivar destination_input_port:
    :vartype destination_input_port: ~flow.models.PortInfo
    """

    # Serialization map: Python attribute -> REST wire key and msrest type.
    _attribute_map = {
        'source_output_port': {'key': 'sourceOutputPort', 'type': 'PortInfo'},
        'destination_input_port': {'key': 'destinationInputPort', 'type': 'PortInfo'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword source_output_port:
        :paramtype source_output_port: ~flow.models.PortInfo
        :keyword destination_input_port:
        :paramtype destination_input_port: ~flow.models.PortInfo
        """
        super(GraphEdge, self).__init__(**kwargs)
        self.source_output_port = kwargs.get('source_output_port', None)
        self.destination_input_port = kwargs.get('destination_input_port', None)


class GraphLayout(msrest.serialization.Model):
    """GraphLayout.

    :ivar node_layouts: This is a dictionary.
    :vartype node_layouts: dict[str, ~flow.models.NodeLayout]
    :ivar extended_data:
    :vartype extended_data: str
    :ivar annotation_nodes:
    :vartype annotation_nodes: list[~flow.models.GraphAnnotationNode]
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Serialization map: Python attribute -> REST wire key and msrest type.
    _attribute_map = {
        'node_layouts': {'key': 'nodeLayouts', 'type': '{NodeLayout}'},
        'extended_data': {'key': 'extendedData', 'type': 'str'},
        'annotation_nodes': {'key': 'annotationNodes', 'type': '[GraphAnnotationNode]'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword node_layouts: This is a dictionary.
        :paramtype node_layouts: dict[str, ~flow.models.NodeLayout]
        :keyword extended_data:
        :paramtype extended_data: str
        :keyword annotation_nodes:
        :paramtype annotation_nodes: list[~flow.models.GraphAnnotationNode]
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(GraphLayout, self).__init__(**kwargs)
        self.node_layouts = kwargs.get('node_layouts', None)
        self.extended_data = kwargs.get('extended_data', None)
        self.annotation_nodes = kwargs.get('annotation_nodes', None)
        self.id = kwargs.get('id', None)
        self.etag = kwargs.get('etag', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)


class GraphLayoutCreationInfo(msrest.serialization.Model):
    """GraphLayoutCreationInfo.

    :ivar node_layouts: This is a dictionary.
    :vartype node_layouts: dict[str, ~flow.models.NodeLayout]
    :ivar extended_data:
    :vartype extended_data: str
    :ivar annotation_nodes:
    :vartype annotation_nodes: list[~flow.models.GraphAnnotationNode]
    """

    # Serialization map: Python attribute -> REST wire key and msrest type.
    _attribute_map = {
        'node_layouts': {'key': 'nodeLayouts', 'type': '{NodeLayout}'},
        'extended_data': {'key': 'extendedData', 'type': 'str'},
        'annotation_nodes': {'key': 'annotationNodes', 'type': '[GraphAnnotationNode]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword node_layouts: This is a dictionary.
        :paramtype node_layouts: dict[str, ~flow.models.NodeLayout]
        :keyword extended_data:
        :paramtype extended_data: str
        :keyword annotation_nodes:
        :paramtype annotation_nodes: list[~flow.models.GraphAnnotationNode]
        """
        super(GraphLayoutCreationInfo, self).__init__(**kwargs)
        self.node_layouts = kwargs.get('node_layouts', None)
        self.extended_data = kwargs.get('extended_data', None)
        self.annotation_nodes = kwargs.get('annotation_nodes', None)


class GraphModuleNode(msrest.serialization.Model):
    """GraphModuleNode.

    :ivar module_type: Possible values include: "None", "BatchInferencing".
    :vartype module_type: str or ~flow.models.ModuleType
    :ivar runconfig:
    :vartype runconfig: str
    :ivar id:
    :vartype id: str
    :ivar module_id:
    :vartype module_id: str
    :ivar comment:
    :vartype comment: str
    :ivar name:
    :vartype name: str
    :ivar module_parameters:
    :vartype module_parameters: list[~flow.models.ParameterAssignment]
    :ivar module_metadata_parameters:
    :vartype module_metadata_parameters: list[~flow.models.ParameterAssignment]
    :ivar module_output_settings:
    :vartype module_output_settings: list[~flow.models.OutputSetting]
    :ivar module_input_settings:
    :vartype module_input_settings: list[~flow.models.InputSetting]
    :ivar use_graph_default_compute:
    :vartype use_graph_default_compute: bool
    :ivar use_graph_default_datastore:
    :vartype use_graph_default_datastore: bool
    :ivar regenerate_output:
    :vartype regenerate_output: bool
    :ivar control_inputs:
    :vartype control_inputs: list[~flow.models.ControlInput]
    :ivar cloud_settings:
    :vartype cloud_settings: ~flow.models.CloudSettings
    :ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
    :vartype execution_phase: str or ~flow.models.ExecutionPhase
    :ivar run_attribution:
    :vartype run_attribution: str
    """

    # Serialization map: Python attribute -> REST wire key and msrest type.
    _attribute_map = {
        'module_type': {'key': 'moduleType', 'type': 'str'},
        'runconfig': {'key': 'runconfig', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'module_id': {'key': 'moduleId', 'type': 'str'},
        'comment': {'key': 'comment', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'module_parameters': {'key': 'moduleParameters', 'type': '[ParameterAssignment]'},
        'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[ParameterAssignment]'},
        'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[OutputSetting]'},
        'module_input_settings': {'key': 'moduleInputSettings', 'type': '[InputSetting]'},
        'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
        'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
        'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
        'control_inputs': {'key': 'controlInputs', 'type': '[ControlInput]'},
        'cloud_settings': {'key': 'cloudSettings', 'type': 'CloudSettings'},
        'execution_phase': {'key': 'executionPhase', 'type': 'str'},
        'run_attribution': {'key': 'runAttribution', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword module_type: Possible values include: "None", "BatchInferencing".
        :paramtype module_type: str or ~flow.models.ModuleType
        :keyword runconfig:
        :paramtype runconfig: str
        :keyword id:
        :paramtype id: str
        :keyword module_id:
        :paramtype module_id: str
        :keyword comment:
        :paramtype comment: str
        :keyword name:
        :paramtype name: str
        :keyword module_parameters:
        :paramtype module_parameters: list[~flow.models.ParameterAssignment]
        :keyword module_metadata_parameters:
        :paramtype module_metadata_parameters: list[~flow.models.ParameterAssignment]
        :keyword module_output_settings:
        :paramtype module_output_settings: list[~flow.models.OutputSetting]
        :keyword module_input_settings:
        :paramtype module_input_settings: list[~flow.models.InputSetting]
        :keyword use_graph_default_compute:
        :paramtype use_graph_default_compute: bool
        :keyword use_graph_default_datastore:
        :paramtype use_graph_default_datastore: bool
        :keyword regenerate_output:
        :paramtype regenerate_output: bool
        :keyword control_inputs:
        :paramtype control_inputs: list[~flow.models.ControlInput]
        :keyword cloud_settings:
        :paramtype cloud_settings: ~flow.models.CloudSettings
        :keyword execution_phase: Possible values include: "Execution", "Initialization",
         "Finalization".
        :paramtype execution_phase: str or ~flow.models.ExecutionPhase
        :keyword run_attribution:
        :paramtype run_attribution: str
        """
        super(GraphModuleNode, self).__init__(**kwargs)
        self.module_type = kwargs.get('module_type', None)
        self.runconfig = kwargs.get('runconfig', None)
        self.id = kwargs.get('id', None)
        self.module_id = kwargs.get('module_id', None)
        self.comment = kwargs.get('comment', None)
        self.name = kwargs.get('name', None)
        self.module_parameters = kwargs.get('module_parameters', None)
        self.module_metadata_parameters = kwargs.get('module_metadata_parameters', None)
        self.module_output_settings = kwargs.get('module_output_settings', None)
        self.module_input_settings = kwargs.get('module_input_settings', None)
        self.use_graph_default_compute = kwargs.get('use_graph_default_compute', None)
        self.use_graph_default_datastore = kwargs.get('use_graph_default_datastore', None)
        self.regenerate_output = kwargs.get('regenerate_output', None)
        self.control_inputs = kwargs.get('control_inputs', None)
        self.cloud_settings = kwargs.get('cloud_settings', None)
        self.execution_phase = kwargs.get('execution_phase', None)
        self.run_attribution = kwargs.get('run_attribution', None)


class GraphModuleNodeRunSetting(msrest.serialization.Model):
    """GraphModuleNodeRunSetting.

    :ivar node_id:
    :vartype node_id: str
    :ivar module_id:
    :vartype module_id: str
    :ivar step_type:
    :vartype step_type: str
    :ivar run_settings:
    :vartype run_settings: list[~flow.models.RunSettingParameterAssignment]
    """

    # Serialization map: Python attribute -> REST wire key and msrest type.
    _attribute_map = {
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'module_id': {'key': 'moduleId', 'type': 'str'},
        'step_type': {'key': 'stepType', 'type': 'str'},
        'run_settings': {'key': 'runSettings', 'type': '[RunSettingParameterAssignment]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword node_id:
        :paramtype node_id: str
        :keyword module_id:
        :paramtype module_id: str
        :keyword step_type:
        :paramtype step_type: str
        :keyword run_settings:
        :paramtype run_settings: list[~flow.models.RunSettingParameterAssignment]
        """
        super(GraphModuleNodeRunSetting, self).__init__(**kwargs)
        self.node_id = kwargs.get('node_id', None)
        self.module_id = kwargs.get('module_id', None)
        self.step_type = kwargs.get('step_type', None)
        self.run_settings = kwargs.get('run_settings', None)


class GraphModuleNodeUIInputSetting(msrest.serialization.Model):
    """GraphModuleNodeUIInputSetting.

    :ivar node_id:
    :vartype node_id: str
    :ivar module_id:
    :vartype module_id: str
    :ivar module_input_settings:
    :vartype module_input_settings: list[~flow.models.UIInputSetting]
    """

    # Serialization map: Python attribute -> REST wire key and msrest type.
    _attribute_map = {
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'module_id': {'key': 'moduleId', 'type': 'str'},
        'module_input_settings': {'key': 'moduleInputSettings', 'type': '[UIInputSetting]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword node_id:
        :paramtype node_id: str
        :keyword module_id:
        :paramtype module_id: str
        :keyword module_input_settings:
        :paramtype module_input_settings: list[~flow.models.UIInputSetting]
        """
        super(GraphModuleNodeUIInputSetting, self).__init__(**kwargs)
        self.node_id = kwargs.get('node_id', None)
        self.module_id = kwargs.get('module_id', None)
        self.module_input_settings = kwargs.get('module_input_settings', None)


class GraphNodeStatusInfo(msrest.serialization.Model):
    """GraphNodeStatusInfo.

    :ivar status: Possible values include: "NotStarted", "Queued", "Running", "Failed", "Finished",
     "Canceled", "PartiallyExecuted", "Bypassed".
    :vartype status: str or ~flow.models.TaskStatusCode
    :ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
     "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
     "Failed", "Canceled".
    :vartype run_status: str or ~flow.models.RunStatus
    :ivar is_bypassed:
    :vartype is_bypassed: bool
    :ivar has_failed_child_run:
    :vartype has_failed_child_run: bool
    :ivar partially_executed:
    :vartype partially_executed: bool
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar aether_start_time:
    :vartype aether_start_time: ~datetime.datetime
    :ivar aether_end_time:
    :vartype aether_end_time: ~datetime.datetime
    :ivar aether_creation_time:
    :vartype aether_creation_time: ~datetime.datetime
    :ivar run_history_start_time:
    :vartype run_history_start_time: ~datetime.datetime
    :ivar run_history_end_time:
    :vartype run_history_end_time: ~datetime.datetime
    :ivar run_history_creation_time:
    :vartype run_history_creation_time: ~datetime.datetime
    :ivar reuse_info:
    :vartype reuse_info: ~flow.models.TaskReuseInfo
    :ivar control_flow_info:
    :vartype control_flow_info: ~flow.models.TaskControlFlowInfo
    :ivar status_code: Possible values include: "NotStarted", "Queued", "Running", "Failed",
     "Finished", "Canceled", "PartiallyExecuted", "Bypassed".
    :vartype status_code: str or ~flow.models.TaskStatusCode
    :ivar status_detail:
    :vartype status_detail: str
    :ivar creation_time:
    :vartype creation_time: ~datetime.datetime
    :ivar schedule_time:
    :vartype schedule_time: ~datetime.datetime
    :ivar start_time:
    :vartype start_time: ~datetime.datetime
    :ivar end_time:
    :vartype end_time: ~datetime.datetime
    :ivar request_id:
    :vartype request_id: str
    :ivar run_id:
    :vartype run_id: str
    :ivar data_container_id:
    :vartype data_container_id: str
    :ivar real_time_log_path:
    :vartype real_time_log_path: str
    :ivar has_warnings:
    :vartype has_warnings: bool
    :ivar composite_node_id:
    :vartype composite_node_id: str
    """

    # Serialization map: Python attribute -> REST wire key and msrest type.
    _attribute_map = {
        'status': {'key': 'status', 'type': 'str'},
        'run_status': {'key': 'runStatus', 'type': 'str'},
        'is_bypassed': {'key': 'isBypassed', 'type': 'bool'},
        'has_failed_child_run': {'key': 'hasFailedChildRun', 'type': 'bool'},
        'partially_executed': {'key': 'partiallyExecuted', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'aether_start_time': {'key': 'aetherStartTime', 'type': 'iso-8601'},
        'aether_end_time': {'key': 'aetherEndTime', 'type': 'iso-8601'},
        'aether_creation_time': {'key': 'aetherCreationTime', 'type': 'iso-8601'},
        'run_history_start_time': {'key': 'runHistoryStartTime', 'type': 'iso-8601'},
        'run_history_end_time': {'key': 'runHistoryEndTime', 'type': 'iso-8601'},
        'run_history_creation_time': {'key': 'runHistoryCreationTime', 'type': 'iso-8601'},
        'reuse_info': {'key': 'reuseInfo', 'type': 'TaskReuseInfo'},
        'control_flow_info': {'key': 'controlFlowInfo', 'type': 'TaskControlFlowInfo'},
        'status_code': {'key': 'statusCode', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
        'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
        'schedule_time': {'key': 'scheduleTime', 'type': 'iso-8601'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
        'real_time_log_path': {'key': 'realTimeLogPath', 'type': 'str'},
        'has_warnings': {'key': 'hasWarnings', 'type': 'bool'},
        'composite_node_id': {'key': 'compositeNodeId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword status: Possible values include: "NotStarted", "Queued", "Running", "Failed",
         "Finished", "Canceled", "PartiallyExecuted", "Bypassed".
        :paramtype status: str or ~flow.models.TaskStatusCode
        :keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing",
         "Paused", "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested",
         "Completed", "Failed", "Canceled".
        :paramtype run_status: str or ~flow.models.RunStatus
        :keyword is_bypassed:
        :paramtype is_bypassed: bool
        :keyword has_failed_child_run:
        :paramtype has_failed_child_run: bool
        :keyword partially_executed:
        :paramtype partially_executed: bool
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword aether_start_time:
        :paramtype aether_start_time: ~datetime.datetime
        :keyword aether_end_time:
        :paramtype aether_end_time: ~datetime.datetime
        :keyword aether_creation_time:
        :paramtype aether_creation_time: ~datetime.datetime
        :keyword run_history_start_time:
        :paramtype run_history_start_time: ~datetime.datetime
        :keyword run_history_end_time:
        :paramtype run_history_end_time: ~datetime.datetime
        :keyword run_history_creation_time:
        :paramtype run_history_creation_time: ~datetime.datetime
        :keyword reuse_info:
        :paramtype reuse_info: ~flow.models.TaskReuseInfo
        :keyword control_flow_info:
        :paramtype control_flow_info: ~flow.models.TaskControlFlowInfo
        :keyword status_code: Possible values include: "NotStarted", "Queued", "Running", "Failed",
         "Finished", "Canceled", "PartiallyExecuted", "Bypassed".
        :paramtype status_code: str or ~flow.models.TaskStatusCode
        :keyword status_detail:
        :paramtype status_detail: str
        :keyword creation_time:
        :paramtype creation_time: ~datetime.datetime
        :keyword schedule_time:
        :paramtype schedule_time: ~datetime.datetime
        :keyword start_time:
        :paramtype start_time: ~datetime.datetime
        :keyword end_time:
        :paramtype end_time: ~datetime.datetime
        :keyword request_id:
        :paramtype request_id: str
        :keyword run_id:
        :paramtype run_id: str
        :keyword data_container_id:
        :paramtype data_container_id: str
        :keyword real_time_log_path:
        :paramtype real_time_log_path: str
        :keyword has_warnings:
        :paramtype has_warnings: bool
        :keyword composite_node_id:
        :paramtype composite_node_id: str
        """
        super(GraphNodeStatusInfo, self).__init__(**kwargs)
        self.status = kwargs.get('status', None)
        self.run_status = kwargs.get('run_status', None)
        self.is_bypassed = kwargs.get('is_bypassed', None)
        self.has_failed_child_run = kwargs.get('has_failed_child_run', None)
        self.partially_executed = kwargs.get('partially_executed', None)
        self.properties = kwargs.get('properties', None)
        self.aether_start_time = kwargs.get('aether_start_time', None)
        self.aether_end_time = kwargs.get('aether_end_time', None)
        self.aether_creation_time = kwargs.get('aether_creation_time', None)
        self.run_history_start_time = kwargs.get('run_history_start_time', None)
        self.run_history_end_time = kwargs.get('run_history_end_time', None)
        self.run_history_creation_time = kwargs.get('run_history_creation_time', None)
        self.reuse_info = kwargs.get('reuse_info', None)
        self.control_flow_info = kwargs.get('control_flow_info', None)
        self.status_code = kwargs.get('status_code', None)
        self.status_detail = kwargs.get('status_detail', None)
        self.creation_time = kwargs.get('creation_time', None)
        self.schedule_time = kwargs.get('schedule_time', None)
        self.start_time = kwargs.get('start_time', None)
        self.end_time = kwargs.get('end_time', None)
        self.request_id = kwargs.get('request_id', None)
        self.run_id = kwargs.get('run_id', None)
        self.data_container_id = kwargs.get('data_container_id', None)
        self.real_time_log_path = kwargs.get('real_time_log_path', None)
        self.has_warnings = kwargs.get('has_warnings', None)
        self.composite_node_id = kwargs.get('composite_node_id', None)


class GraphReferenceNode(msrest.serialization.Model):
    """GraphReferenceNode.

    :ivar graph_id:
    :vartype graph_id: str
    :ivar default_compute:
    :vartype default_compute: ~flow.models.ComputeSetting
    :ivar default_datastore:
    :vartype default_datastore: ~flow.models.DatastoreSetting
    :ivar id:
    :vartype id: str
    :ivar module_id:
    :vartype module_id: str
    :ivar comment:
    :vartype comment: str
    :ivar name:
    :vartype name: str
    :ivar module_parameters:
    :vartype module_parameters: list[~flow.models.ParameterAssignment]
    :ivar module_metadata_parameters:
    :vartype module_metadata_parameters: list[~flow.models.ParameterAssignment]
    :ivar module_output_settings:
    :vartype module_output_settings: list[~flow.models.OutputSetting]
    :ivar module_input_settings:
    :vartype module_input_settings: list[~flow.models.InputSetting]
    :ivar use_graph_default_compute:
    :vartype use_graph_default_compute: bool
    :ivar use_graph_default_datastore:
    :vartype use_graph_default_datastore: bool
    :ivar regenerate_output:
    :vartype regenerate_output: bool
    :ivar control_inputs:
    :vartype control_inputs: list[~flow.models.ControlInput]
    :ivar cloud_settings:
    :vartype cloud_settings: ~flow.models.CloudSettings
    :ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
    :vartype execution_phase: str or ~flow.models.ExecutionPhase
    :ivar run_attribution:
    :vartype run_attribution: str
    """

    # Serialization map: Python attribute -> REST wire key and msrest type.
    _attribute_map = {
        'graph_id': {'key': 'graphId', 'type': 'str'},
        'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
        'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
        'id': {'key': 'id', 'type': 'str'},
        'module_id': {'key': 'moduleId', 'type': 'str'},
        'comment': {'key': 'comment', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'module_parameters': {'key': 'moduleParameters', 'type': '[ParameterAssignment]'},
        'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[ParameterAssignment]'},
        'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[OutputSetting]'},
        'module_input_settings': {'key': 'moduleInputSettings', 'type': '[InputSetting]'},
        'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
        'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
        'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
        'control_inputs': {'key': 'controlInputs', 'type': '[ControlInput]'},
        'cloud_settings': {'key': 'cloudSettings', 'type': 'CloudSettings'},
        'execution_phase': {'key': 'executionPhase', 'type': 'str'},
        'run_attribution': {'key': 'runAttribution', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword graph_id:
        :paramtype graph_id: str
        :keyword default_compute:
        :paramtype default_compute: ~flow.models.ComputeSetting
        :keyword default_datastore:
        :paramtype default_datastore: ~flow.models.DatastoreSetting
        :keyword id:
        :paramtype id: str
        :keyword module_id:
        :paramtype module_id: str
        :keyword comment:
        :paramtype comment: str
        :keyword name:
        :paramtype name: str
        :keyword module_parameters:
        :paramtype module_parameters: list[~flow.models.ParameterAssignment]
        :keyword module_metadata_parameters:
        :paramtype module_metadata_parameters: list[~flow.models.ParameterAssignment]
        :keyword module_output_settings:
        :paramtype module_output_settings: list[~flow.models.OutputSetting]
        :keyword module_input_settings:
        :paramtype module_input_settings: list[~flow.models.InputSetting]
        :keyword use_graph_default_compute:
        :paramtype use_graph_default_compute: bool
        :keyword use_graph_default_datastore:
        :paramtype use_graph_default_datastore: bool
        :keyword regenerate_output:
        :paramtype regenerate_output: bool
        :keyword control_inputs:
        :paramtype control_inputs: list[~flow.models.ControlInput]
        :keyword cloud_settings:
        :paramtype cloud_settings: ~flow.models.CloudSettings
        :keyword execution_phase: Possible values include: "Execution", "Initialization",
         "Finalization".
        :paramtype execution_phase: str or ~flow.models.ExecutionPhase
        :keyword run_attribution:
        :paramtype run_attribution: str
        """
        super(GraphReferenceNode, self).__init__(**kwargs)
        self.graph_id = kwargs.get('graph_id', None)
        self.default_compute = kwargs.get('default_compute', None)
        self.default_datastore = kwargs.get('default_datastore', None)
        self.id = kwargs.get('id', None)
        self.module_id = kwargs.get('module_id', None)
        self.comment = kwargs.get('comment', None)
        self.name = kwargs.get('name', None)
        self.module_parameters = kwargs.get('module_parameters', None)
        self.module_metadata_parameters = kwargs.get('module_metadata_parameters', None)
        self.module_output_settings = kwargs.get('module_output_settings', None)
        self.module_input_settings = kwargs.get('module_input_settings', None)
        self.use_graph_default_compute = kwargs.get('use_graph_default_compute', None)
        self.use_graph_default_datastore = kwargs.get('use_graph_default_datastore', None)
        self.regenerate_output = kwargs.get('regenerate_output', None)
        self.control_inputs = kwargs.get('control_inputs', None)
        self.cloud_settings = kwargs.get('cloud_settings', None)
        self.execution_phase = kwargs.get('execution_phase', None)
        self.run_attribution = kwargs.get('run_attribution', None)


class HdfsReference(msrest.serialization.Model):
    """HdfsReference.

:ivar aml_data_store_name: :vartype aml_data_store_name: str :ivar relative_path: :vartype relative_path: str """ _attribute_map = { 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword aml_data_store_name: :paramtype aml_data_store_name: str :keyword relative_path: :paramtype relative_path: str """ super(HdfsReference, self).__init__(**kwargs) self.aml_data_store_name = kwargs.get('aml_data_store_name', None) self.relative_path = kwargs.get('relative_path', None) class HdiClusterComputeInfo(msrest.serialization.Model): """HdiClusterComputeInfo. :ivar address: :vartype address: str :ivar username: :vartype username: str :ivar password: :vartype password: str :ivar private_key: :vartype private_key: str """ _attribute_map = { 'address': {'key': 'address', 'type': 'str'}, 'username': {'key': 'username', 'type': 'str'}, 'password': {'key': 'password', 'type': 'str'}, 'private_key': {'key': 'privateKey', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword address: :paramtype address: str :keyword username: :paramtype username: str :keyword password: :paramtype password: str :keyword private_key: :paramtype private_key: str """ super(HdiClusterComputeInfo, self).__init__(**kwargs) self.address = kwargs.get('address', None) self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) self.private_key = kwargs.get('private_key', None) class HdiConfiguration(msrest.serialization.Model): """HdiConfiguration. :ivar yarn_deploy_mode: Possible values include: "None", "Client", "Cluster". :vartype yarn_deploy_mode: str or ~flow.models.YarnDeployMode """ _attribute_map = { 'yarn_deploy_mode': {'key': 'yarnDeployMode', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword yarn_deploy_mode: Possible values include: "None", "Client", "Cluster". 
        :paramtype yarn_deploy_mode: str or ~flow.models.YarnDeployMode
        """
        super(HdiConfiguration, self).__init__(**kwargs)
        self.yarn_deploy_mode = kwargs.get('yarn_deploy_mode', None)


class HdiRunConfiguration(msrest.serialization.Model):
    """HdiRunConfiguration.

    :ivar file:
    :vartype file: str
    :ivar class_name:
    :vartype class_name: str
    :ivar files:
    :vartype files: list[str]
    :ivar archives:
    :vartype archives: list[str]
    :ivar jars:
    :vartype jars: list[str]
    :ivar py_files:
    :vartype py_files: list[str]
    :ivar compute_name:
    :vartype compute_name: str
    :ivar queue:
    :vartype queue: str
    :ivar driver_memory:
    :vartype driver_memory: str
    :ivar driver_cores:
    :vartype driver_cores: int
    :ivar executor_memory:
    :vartype executor_memory: str
    :ivar executor_cores:
    :vartype executor_cores: int
    :ivar number_executors:
    :vartype number_executors: int
    :ivar conf: Dictionary of :code:`<string>`.
    :vartype conf: dict[str, str]
    :ivar name:
    :vartype name: str
    """

    _attribute_map = {
        'file': {'key': 'file', 'type': 'str'},
        'class_name': {'key': 'className', 'type': 'str'},
        'files': {'key': 'files', 'type': '[str]'},
        'archives': {'key': 'archives', 'type': '[str]'},
        'jars': {'key': 'jars', 'type': '[str]'},
        'py_files': {'key': 'pyFiles', 'type': '[str]'},
        'compute_name': {'key': 'computeName', 'type': 'str'},
        'queue': {'key': 'queue', 'type': 'str'},
        'driver_memory': {'key': 'driverMemory', 'type': 'str'},
        'driver_cores': {'key': 'driverCores', 'type': 'int'},
        'executor_memory': {'key': 'executorMemory', 'type': 'str'},
        'executor_cores': {'key': 'executorCores', 'type': 'int'},
        'number_executors': {'key': 'numberExecutors', 'type': 'int'},
        'conf': {'key': 'conf', 'type': '{str}'},
        'name': {'key': 'name', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword file:
        :paramtype file: str
        :keyword class_name:
        :paramtype class_name: str
        :keyword files:
        :paramtype files: list[str]
        :keyword archives:
        :paramtype archives: list[str]
        :keyword jars:
        :paramtype jars: list[str]
        :keyword py_files:
        :paramtype py_files: list[str]
        :keyword compute_name:
        :paramtype compute_name: str
        :keyword queue:
        :paramtype queue: str
        :keyword driver_memory:
        :paramtype driver_memory: str
        :keyword driver_cores:
        :paramtype driver_cores: int
        :keyword executor_memory:
        :paramtype executor_memory: str
        :keyword executor_cores:
        :paramtype executor_cores: int
        :keyword number_executors:
        :paramtype number_executors: int
        :keyword conf: Dictionary of :code:`<string>`.
        :paramtype conf: dict[str, str]
        :keyword name:
        :paramtype name: str
        """
        super(HdiRunConfiguration, self).__init__(**kwargs)
        # Spark-submit style settings (jars/pyFiles/driver/executor sizing);
        # all optional, defaulting to None.
        self.file = kwargs.get('file', None)
        self.class_name = kwargs.get('class_name', None)
        self.files = kwargs.get('files', None)
        self.archives = kwargs.get('archives', None)
        self.jars = kwargs.get('jars', None)
        self.py_files = kwargs.get('py_files', None)
        self.compute_name = kwargs.get('compute_name', None)
        self.queue = kwargs.get('queue', None)
        self.driver_memory = kwargs.get('driver_memory', None)
        self.driver_cores = kwargs.get('driver_cores', None)
        self.executor_memory = kwargs.get('executor_memory', None)
        self.executor_cores = kwargs.get('executor_cores', None)
        self.number_executors = kwargs.get('number_executors', None)
        self.conf = kwargs.get('conf', None)
        self.name = kwargs.get('name', None)


class HistoryConfiguration(msrest.serialization.Model):
    """HistoryConfiguration.
    :ivar output_collection:
    :vartype output_collection: bool
    :ivar directories_to_watch:
    :vartype directories_to_watch: list[str]
    :ivar enable_m_lflow_tracking:
    :vartype enable_m_lflow_tracking: bool
    """

    _attribute_map = {
        'output_collection': {'key': 'outputCollection', 'type': 'bool'},
        'directories_to_watch': {'key': 'directoriesToWatch', 'type': '[str]'},
        'enable_m_lflow_tracking': {'key': 'enableMLflowTracking', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword output_collection:
        :paramtype output_collection: bool
        :keyword directories_to_watch:
        :paramtype directories_to_watch: list[str]
        :keyword enable_m_lflow_tracking:
        :paramtype enable_m_lflow_tracking: bool
        """
        super(HistoryConfiguration, self).__init__(**kwargs)
        # Unlike most generated models (which default to None), these fields have
        # real defaults: collection and MLflow tracking on, watching 'logs'.
        self.output_collection = kwargs.get('output_collection', True)
        # The default list literal is evaluated on each call, so instances never
        # share the same list object.
        self.directories_to_watch = kwargs.get('directories_to_watch', ['logs'])
        self.enable_m_lflow_tracking = kwargs.get('enable_m_lflow_tracking', True)


class HyperDriveConfiguration(msrest.serialization.Model):
    """HyperDriveConfiguration.

    :ivar hyper_drive_run_config:
    :vartype hyper_drive_run_config: str
    :ivar primary_metric_goal:
    :vartype primary_metric_goal: str
    :ivar primary_metric_name:
    :vartype primary_metric_name: str
    :ivar arguments:
    :vartype arguments: list[~flow.models.ArgumentAssignment]
    """

    _attribute_map = {
        'hyper_drive_run_config': {'key': 'hyperDriveRunConfig', 'type': 'str'},
        'primary_metric_goal': {'key': 'primaryMetricGoal', 'type': 'str'},
        'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
        'arguments': {'key': 'arguments', 'type': '[ArgumentAssignment]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword hyper_drive_run_config:
        :paramtype hyper_drive_run_config: str
        :keyword primary_metric_goal:
        :paramtype primary_metric_goal: str
        :keyword primary_metric_name:
        :paramtype primary_metric_name: str
        :keyword arguments:
        :paramtype arguments: list[~flow.models.ArgumentAssignment]
        """
        super(HyperDriveConfiguration, self).__init__(**kwargs)
        self.hyper_drive_run_config = kwargs.get('hyper_drive_run_config', None)
        self.primary_metric_goal = kwargs.get('primary_metric_goal', None)
        self.primary_metric_name = kwargs.get('primary_metric_name', None)
        self.arguments = kwargs.get('arguments', None)


class ICheckableLongRunningOperationResponse(msrest.serialization.Model):
    """ICheckableLongRunningOperationResponse.

    :ivar completion_result: Any object.
    :vartype completion_result: any
    :ivar location:
    :vartype location: str
    :ivar operation_result:
    :vartype operation_result: str
    """

    _attribute_map = {
        'completion_result': {'key': 'completionResult', 'type': 'object'},
        'location': {'key': 'location', 'type': 'str'},
        'operation_result': {'key': 'operationResult', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword completion_result: Any object.
        :paramtype completion_result: any
        :keyword location:
        :paramtype location: str
        :keyword operation_result:
        :paramtype operation_result: str
        """
        super(ICheckableLongRunningOperationResponse, self).__init__(**kwargs)
        self.completion_result = kwargs.get('completion_result', None)
        self.location = kwargs.get('location', None)
        self.operation_result = kwargs.get('operation_result', None)


class IdentityConfiguration(msrest.serialization.Model):
    """IdentityConfiguration.

    :ivar type: Possible values include: "Managed", "ServicePrincipal", "AMLToken".
    :vartype type: str or ~flow.models.IdentityType
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar secret:
    :vartype secret: str
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'secret': {'key': 'secret', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword type: Possible values include: "Managed", "ServicePrincipal", "AMLToken".
        :paramtype type: str or ~flow.models.IdentityType
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword secret:
        :paramtype secret: str
        """
        super(IdentityConfiguration, self).__init__(**kwargs)
        self.type = kwargs.get('type', None)
        self.properties = kwargs.get('properties', None)
        # NOTE(review): 'secret' is credential material — keep it out of logs.
        self.secret = kwargs.get('secret', None)


class IdentitySetting(msrest.serialization.Model):
    """IdentitySetting.

    :ivar type: Possible values include: "UserIdentity", "Managed", "AMLToken".
    :vartype type: str or ~flow.models.AEVAIdentityType
    :ivar client_id:
    :vartype client_id: str
    :ivar object_id:
    :vartype object_id: str
    :ivar msi_resource_id:
    :vartype msi_resource_id: str
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'client_id': {'key': 'clientId', 'type': 'str'},
        'object_id': {'key': 'objectId', 'type': 'str'},
        'msi_resource_id': {'key': 'msiResourceId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword type: Possible values include: "UserIdentity", "Managed", "AMLToken".
        :paramtype type: str or ~flow.models.AEVAIdentityType
        :keyword client_id:
        :paramtype client_id: str
        :keyword object_id:
        :paramtype object_id: str
        :keyword msi_resource_id:
        :paramtype msi_resource_id: str
        """
        super(IdentitySetting, self).__init__(**kwargs)
        self.type = kwargs.get('type', None)
        self.client_id = kwargs.get('client_id', None)
        self.object_id = kwargs.get('object_id', None)
        self.msi_resource_id = kwargs.get('msi_resource_id', None)


class ImportDataTask(msrest.serialization.Model):
    """ImportDataTask.

    :ivar data_transfer_source:
    :vartype data_transfer_source: ~flow.models.DataTransferSource
    """

    # NOTE: the wire key is PascalCase 'DataTransferSource', unlike the camelCase
    # keys used elsewhere in this file — it mirrors the service contract as-is.
    _attribute_map = {
        'data_transfer_source': {'key': 'DataTransferSource', 'type': 'DataTransferSource'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_transfer_source:
        :paramtype data_transfer_source: ~flow.models.DataTransferSource
        """
        super(ImportDataTask, self).__init__(**kwargs)
        self.data_transfer_source = kwargs.get('data_transfer_source', None)


class IndexedErrorResponse(msrest.serialization.Model):
    """IndexedErrorResponse.
    :ivar code:
    :vartype code: str
    :ivar error_code_hierarchy:
    :vartype error_code_hierarchy: str
    :ivar message:
    :vartype message: str
    :ivar time:
    :vartype time: ~datetime.datetime
    :ivar component_name:
    :vartype component_name: str
    :ivar severity:
    :vartype severity: int
    :ivar details_uri:
    :vartype details_uri: str
    :ivar reference_code:
    :vartype reference_code: str
    """

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'error_code_hierarchy': {'key': 'errorCodeHierarchy', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        # 'time' is (de)serialized as an ISO-8601 timestamp by msrest.
        'time': {'key': 'time', 'type': 'iso-8601'},
        'component_name': {'key': 'componentName', 'type': 'str'},
        'severity': {'key': 'severity', 'type': 'int'},
        'details_uri': {'key': 'detailsUri', 'type': 'str'},
        'reference_code': {'key': 'referenceCode', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword code:
        :paramtype code: str
        :keyword error_code_hierarchy:
        :paramtype error_code_hierarchy: str
        :keyword message:
        :paramtype message: str
        :keyword time:
        :paramtype time: ~datetime.datetime
        :keyword component_name:
        :paramtype component_name: str
        :keyword severity:
        :paramtype severity: int
        :keyword details_uri:
        :paramtype details_uri: str
        :keyword reference_code:
        :paramtype reference_code: str
        """
        super(IndexedErrorResponse, self).__init__(**kwargs)
        self.code = kwargs.get('code', None)
        self.error_code_hierarchy = kwargs.get('error_code_hierarchy', None)
        self.message = kwargs.get('message', None)
        self.time = kwargs.get('time', None)
        self.component_name = kwargs.get('component_name', None)
        self.severity = kwargs.get('severity', None)
        self.details_uri = kwargs.get('details_uri', None)
        self.reference_code = kwargs.get('reference_code', None)


class InitScriptInfoDto(msrest.serialization.Model):
    """InitScriptInfoDto.

    :ivar dbfs:
    :vartype dbfs: ~flow.models.DbfsStorageInfoDto
    """

    _attribute_map = {
        'dbfs': {'key': 'dbfs', 'type': 'DbfsStorageInfoDto'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword dbfs:
        :paramtype dbfs: ~flow.models.DbfsStorageInfoDto
        """
        super(InitScriptInfoDto, self).__init__(**kwargs)
        self.dbfs = kwargs.get('dbfs', None)


class InnerErrorDetails(msrest.serialization.Model):
    """InnerErrorDetails.

    :ivar code:
    :vartype code: str
    :ivar message:
    :vartype message: str
    :ivar target:
    :vartype target: str
    """

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword code:
        :paramtype code: str
        :keyword message:
        :paramtype message: str
        :keyword target:
        :paramtype target: str
        """
        super(InnerErrorDetails, self).__init__(**kwargs)
        self.code = kwargs.get('code', None)
        self.message = kwargs.get('message', None)
        self.target = kwargs.get('target', None)


class InnerErrorResponse(msrest.serialization.Model):
    """A nested structure of errors.

    :ivar code: The error code.
    :vartype code: str
    :ivar inner_error: A nested structure of errors.
    :vartype inner_error: ~flow.models.InnerErrorResponse
    """

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword code: The error code.
        :paramtype code: str
        :keyword inner_error: A nested structure of errors.
        :paramtype inner_error: ~flow.models.InnerErrorResponse
        """
        super(InnerErrorResponse, self).__init__(**kwargs)
        self.code = kwargs.get('code', None)
        # Self-referential: each level may wrap a deeper InnerErrorResponse.
        self.inner_error = kwargs.get('inner_error', None)


class InputAsset(msrest.serialization.Model):
    """InputAsset.

    :ivar asset:
    :vartype asset: ~flow.models.Asset
    :ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
    :vartype mechanism: str or ~flow.models.DeliveryMechanism
    :ivar environment_variable_name:
    :vartype environment_variable_name: str
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar overwrite:
    :vartype overwrite: bool
    :ivar options: Dictionary of :code:`<string>`.
    :vartype options: dict[str, str]
    """

    _attribute_map = {
        'asset': {'key': 'asset', 'type': 'Asset'},
        'mechanism': {'key': 'mechanism', 'type': 'str'},
        'environment_variable_name': {'key': 'environmentVariableName', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'overwrite': {'key': 'overwrite', 'type': 'bool'},
        'options': {'key': 'options', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword asset:
        :paramtype asset: ~flow.models.Asset
        :keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
        :paramtype mechanism: str or ~flow.models.DeliveryMechanism
        :keyword environment_variable_name:
        :paramtype environment_variable_name: str
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword overwrite:
        :paramtype overwrite: bool
        :keyword options: Dictionary of :code:`<string>`.
        :paramtype options: dict[str, str]
        """
        super(InputAsset, self).__init__(**kwargs)
        self.asset = kwargs.get('asset', None)
        self.mechanism = kwargs.get('mechanism', None)
        self.environment_variable_name = kwargs.get('environment_variable_name', None)
        self.path_on_compute = kwargs.get('path_on_compute', None)
        self.overwrite = kwargs.get('overwrite', None)
        self.options = kwargs.get('options', None)


class InputData(msrest.serialization.Model):
    """InputData.

    :ivar dataset_id:
    :vartype dataset_id: str
    :ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
     "ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
    :vartype mode: str or ~flow.models.DataBindingMode
    :ivar value:
    :vartype value: str
    """

    _attribute_map = {
        'dataset_id': {'key': 'datasetId', 'type': 'str'},
        'mode': {'key': 'mode', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword dataset_id:
        :paramtype dataset_id: str
        :keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
         "ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
        :paramtype mode: str or ~flow.models.DataBindingMode
        :keyword value:
        :paramtype value: str
        """
        super(InputData, self).__init__(**kwargs)
        self.dataset_id = kwargs.get('dataset_id', None)
        self.mode = kwargs.get('mode', None)
        self.value = kwargs.get('value', None)


class InputDataBinding(msrest.serialization.Model):
    """InputDataBinding.

    :ivar data_id:
    :vartype data_id: str
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
     "ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
    :vartype mode: str or ~flow.models.DataBindingMode
    :ivar description:
    :vartype description: str
    :ivar uri:
    :vartype uri: ~flow.models.MfeInternalUriReference
    :ivar value:
    :vartype value: str
    :ivar asset_uri:
    :vartype asset_uri: str
    :ivar job_input_type: Possible values include: "Dataset", "Uri", "Literal", "UriFile",
     "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
    :vartype job_input_type: str or ~flow.models.JobInputType
    """

    _attribute_map = {
        'data_id': {'key': 'dataId', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'mode': {'key': 'mode', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'MfeInternalUriReference'},
        'value': {'key': 'value', 'type': 'str'},
        'asset_uri': {'key': 'assetUri', 'type': 'str'},
        'job_input_type': {'key': 'jobInputType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_id:
        :paramtype data_id: str
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
         "ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
        :paramtype mode: str or ~flow.models.DataBindingMode
        :keyword description:
        :paramtype description: str
        :keyword uri:
        :paramtype uri: ~flow.models.MfeInternalUriReference
        :keyword value:
        :paramtype value: str
        :keyword asset_uri:
        :paramtype asset_uri: str
        :keyword job_input_type: Possible values include: "Dataset", "Uri", "Literal", "UriFile",
         "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
        :paramtype job_input_type: str or ~flow.models.JobInputType
        """
        super(InputDataBinding, self).__init__(**kwargs)
        self.data_id = kwargs.get('data_id', None)
        self.path_on_compute = kwargs.get('path_on_compute', None)
        self.mode = kwargs.get('mode', None)
        self.description = kwargs.get('description', None)
        self.uri = kwargs.get('uri', None)
        self.value = kwargs.get('value', None)
        self.asset_uri = kwargs.get('asset_uri', None)
        self.job_input_type = kwargs.get('job_input_type', None)


class InputDefinition(msrest.serialization.Model):
    """InputDefinition.

    :ivar name:
    :vartype name: str
    :ivar type:
    :vartype type: list[str or ~flow.models.ValueType]
    :ivar default: Anything.
    :vartype default: any
    :ivar description:
    :vartype description: str
    :ivar enum:
    :vartype enum: list[str]
    :ivar enabled_by:
    :vartype enabled_by: str
    :ivar enabled_by_type:
    :vartype enabled_by_type: list[str or ~flow.models.ValueType]
    :ivar enabled_by_value:
    :vartype enabled_by_value: list[any]
    :ivar model_list:
    :vartype model_list: list[str]
    :ivar capabilities:
    :vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
    :ivar dynamic_list:
    :vartype dynamic_list: ~flow.models.ToolInputDynamicList
    :ivar allow_manual_entry:
    :vartype allow_manual_entry: bool
    :ivar is_multi_select:
    :vartype is_multi_select: bool
    :ivar generated_by:
    :vartype generated_by: ~flow.models.ToolInputGeneratedBy
    :ivar input_type: Possible values include: "default", "uionly_hidden".
    :vartype input_type: str or ~flow.models.InputType
    :ivar advanced:
    :vartype advanced: bool
    :ivar ui_hints: This is a dictionary.
    :vartype ui_hints: dict[str, any]
    """

    # NOTE: unlike most models here, the wire keys are snake_case (e.g.
    # 'enabled_by', 'ui_hints') — they match the tool contract verbatim.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': '[str]'},
        'default': {'key': 'default', 'type': 'object'},
        'description': {'key': 'description', 'type': 'str'},
        'enum': {'key': 'enum', 'type': '[str]'},
        'enabled_by': {'key': 'enabled_by', 'type': 'str'},
        'enabled_by_type': {'key': 'enabled_by_type', 'type': '[str]'},
        'enabled_by_value': {'key': 'enabled_by_value', 'type': '[object]'},
        'model_list': {'key': 'model_list', 'type': '[str]'},
        'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
        'dynamic_list': {'key': 'dynamic_list', 'type': 'ToolInputDynamicList'},
        'allow_manual_entry': {'key': 'allow_manual_entry', 'type': 'bool'},
        'is_multi_select': {'key': 'is_multi_select', 'type': 'bool'},
        'generated_by': {'key': 'generated_by', 'type': 'ToolInputGeneratedBy'},
        'input_type': {'key': 'input_type', 'type': 'str'},
        'advanced': {'key': 'advanced', 'type': 'bool'},
        'ui_hints': {'key': 'ui_hints', 'type': '{object}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype
        name: str
        :keyword type:
        :paramtype type: list[str or ~flow.models.ValueType]
        :keyword default: Anything.
        :paramtype default: any
        :keyword description:
        :paramtype description: str
        :keyword enum:
        :paramtype enum: list[str]
        :keyword enabled_by:
        :paramtype enabled_by: str
        :keyword enabled_by_type:
        :paramtype enabled_by_type: list[str or ~flow.models.ValueType]
        :keyword enabled_by_value:
        :paramtype enabled_by_value: list[any]
        :keyword model_list:
        :paramtype model_list: list[str]
        :keyword capabilities:
        :paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
        :keyword dynamic_list:
        :paramtype dynamic_list: ~flow.models.ToolInputDynamicList
        :keyword allow_manual_entry:
        :paramtype allow_manual_entry: bool
        :keyword is_multi_select:
        :paramtype is_multi_select: bool
        :keyword generated_by:
        :paramtype generated_by: ~flow.models.ToolInputGeneratedBy
        :keyword input_type: Possible values include: "default", "uionly_hidden".
        :paramtype input_type: str or ~flow.models.InputType
        :keyword advanced:
        :paramtype advanced: bool
        :keyword ui_hints: This is a dictionary.
        :paramtype ui_hints: dict[str, any]
        """
        super(InputDefinition, self).__init__(**kwargs)
        # Tool-input schema fields; all optional, defaulting to None.
        self.name = kwargs.get('name', None)
        self.type = kwargs.get('type', None)
        self.default = kwargs.get('default', None)
        self.description = kwargs.get('description', None)
        self.enum = kwargs.get('enum', None)
        self.enabled_by = kwargs.get('enabled_by', None)
        self.enabled_by_type = kwargs.get('enabled_by_type', None)
        self.enabled_by_value = kwargs.get('enabled_by_value', None)
        self.model_list = kwargs.get('model_list', None)
        self.capabilities = kwargs.get('capabilities', None)
        self.dynamic_list = kwargs.get('dynamic_list', None)
        self.allow_manual_entry = kwargs.get('allow_manual_entry', None)
        self.is_multi_select = kwargs.get('is_multi_select', None)
        self.generated_by = kwargs.get('generated_by', None)
        self.input_type = kwargs.get('input_type', None)
        self.advanced = kwargs.get('advanced', None)
        self.ui_hints = kwargs.get('ui_hints', None)


class InputOutputPortMetadata(msrest.serialization.Model):
    """InputOutputPortMetadata.

    Variables are only populated by the server, and will be ignored when sending a request.
    :ivar graph_module_node_id:
    :vartype graph_module_node_id: str
    :ivar port_name:
    :vartype port_name: str
    :ivar schema:
    :vartype schema: str
    :ivar name:
    :vartype name: str
    :ivar id:
    :vartype id: str
    """

    # 'id' is server-populated; msrest will not send it on requests.
    _validation = {
        'id': {'readonly': True},
    }

    _attribute_map = {
        'graph_module_node_id': {'key': 'graphModuleNodeId', 'type': 'str'},
        'port_name': {'key': 'portName', 'type': 'str'},
        'schema': {'key': 'schema', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword graph_module_node_id:
        :paramtype graph_module_node_id: str
        :keyword port_name:
        :paramtype port_name: str
        :keyword schema:
        :paramtype schema: str
        :keyword name:
        :paramtype name: str
        """
        super(InputOutputPortMetadata, self).__init__(**kwargs)
        self.graph_module_node_id = kwargs.get('graph_module_node_id', None)
        self.port_name = kwargs.get('port_name', None)
        self.schema = kwargs.get('schema', None)
        self.name = kwargs.get('name', None)
        # Readonly field: always initialized to None client-side (see _validation).
        self.id = None


class InputSetting(msrest.serialization.Model):
    """InputSetting.

    :ivar name:
    :vartype name: str
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar options: This is a dictionary.
    :vartype options: dict[str, str]
    :ivar additional_transformations:
    :vartype additional_transformations: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'options': {'key': 'options', 'type': '{str}'},
        'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword options: This is a dictionary.
        :paramtype options: dict[str, str]
        :keyword additional_transformations:
        :paramtype additional_transformations: str
        """
        super(InputSetting, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.data_store_mode = kwargs.get('data_store_mode', None)
        self.path_on_compute = kwargs.get('path_on_compute', None)
        self.options = kwargs.get('options', None)
        self.additional_transformations = kwargs.get('additional_transformations', None)


class IntellectualPropertyPublisherInformation(msrest.serialization.Model):
    """IntellectualPropertyPublisherInformation.

    :ivar intellectual_property_publisher:
    :vartype intellectual_property_publisher: str
    """

    _attribute_map = {
        'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword intellectual_property_publisher:
        :paramtype intellectual_property_publisher: str
        """
        super(IntellectualPropertyPublisherInformation, self).__init__(**kwargs)
        self.intellectual_property_publisher = kwargs.get('intellectual_property_publisher', None)


class InteractiveConfig(msrest.serialization.Model):
    """InteractiveConfig.
    :ivar is_ssh_enabled:
    :vartype is_ssh_enabled: bool
    :ivar ssh_public_key:
    :vartype ssh_public_key: str
    :ivar is_i_python_enabled:
    :vartype is_i_python_enabled: bool
    :ivar is_tensor_board_enabled:
    :vartype is_tensor_board_enabled: bool
    :ivar interactive_port:
    :vartype interactive_port: int
    """

    _attribute_map = {
        'is_ssh_enabled': {'key': 'isSSHEnabled', 'type': 'bool'},
        'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
        'is_i_python_enabled': {'key': 'isIPythonEnabled', 'type': 'bool'},
        'is_tensor_board_enabled': {'key': 'isTensorBoardEnabled', 'type': 'bool'},
        'interactive_port': {'key': 'interactivePort', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword is_ssh_enabled:
        :paramtype is_ssh_enabled: bool
        :keyword ssh_public_key:
        :paramtype ssh_public_key: str
        :keyword is_i_python_enabled:
        :paramtype is_i_python_enabled: bool
        :keyword is_tensor_board_enabled:
        :paramtype is_tensor_board_enabled: bool
        :keyword interactive_port:
        :paramtype interactive_port: int
        """
        super(InteractiveConfig, self).__init__(**kwargs)
        self.is_ssh_enabled = kwargs.get('is_ssh_enabled', None)
        self.ssh_public_key = kwargs.get('ssh_public_key', None)
        self.is_i_python_enabled = kwargs.get('is_i_python_enabled', None)
        self.is_tensor_board_enabled = kwargs.get('is_tensor_board_enabled', None)
        self.interactive_port = kwargs.get('interactive_port', None)


# NOTE(review): field-for-field duplicate of InteractiveConfig above — presumably
# two distinct types in the service contract; do not merge by hand (generated file).
class InteractiveConfiguration(msrest.serialization.Model):
    """InteractiveConfiguration.

    :ivar is_ssh_enabled:
    :vartype is_ssh_enabled: bool
    :ivar ssh_public_key:
    :vartype ssh_public_key: str
    :ivar is_i_python_enabled:
    :vartype is_i_python_enabled: bool
    :ivar is_tensor_board_enabled:
    :vartype is_tensor_board_enabled: bool
    :ivar interactive_port:
    :vartype interactive_port: int
    """

    _attribute_map = {
        'is_ssh_enabled': {'key': 'isSSHEnabled', 'type': 'bool'},
        'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
        'is_i_python_enabled': {'key': 'isIPythonEnabled', 'type': 'bool'},
        'is_tensor_board_enabled': {'key': 'isTensorBoardEnabled', 'type': 'bool'},
        'interactive_port': {'key': 'interactivePort', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword is_ssh_enabled:
        :paramtype is_ssh_enabled: bool
        :keyword ssh_public_key:
        :paramtype ssh_public_key: str
        :keyword is_i_python_enabled:
        :paramtype is_i_python_enabled: bool
        :keyword is_tensor_board_enabled:
        :paramtype is_tensor_board_enabled: bool
        :keyword interactive_port:
        :paramtype interactive_port: int
        """
        super(InteractiveConfiguration, self).__init__(**kwargs)
        self.is_ssh_enabled = kwargs.get('is_ssh_enabled', None)
        self.ssh_public_key = kwargs.get('ssh_public_key', None)
        self.is_i_python_enabled = kwargs.get('is_i_python_enabled', None)
        self.is_tensor_board_enabled = kwargs.get('is_tensor_board_enabled', None)
        self.interactive_port = kwargs.get('interactive_port', None)


class JobCost(msrest.serialization.Model):
    """JobCost.
    :ivar charged_cpu_core_seconds:
    :vartype charged_cpu_core_seconds: float
    :ivar charged_cpu_memory_megabyte_seconds:
    :vartype charged_cpu_memory_megabyte_seconds: float
    :ivar charged_gpu_seconds:
    :vartype charged_gpu_seconds: float
    :ivar charged_node_utilization_seconds:
    :vartype charged_node_utilization_seconds: float
    """

    _attribute_map = {
        'charged_cpu_core_seconds': {'key': 'chargedCpuCoreSeconds', 'type': 'float'},
        'charged_cpu_memory_megabyte_seconds': {'key': 'chargedCpuMemoryMegabyteSeconds', 'type': 'float'},
        'charged_gpu_seconds': {'key': 'chargedGpuSeconds', 'type': 'float'},
        'charged_node_utilization_seconds': {'key': 'chargedNodeUtilizationSeconds', 'type': 'float'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword charged_cpu_core_seconds:
        :paramtype charged_cpu_core_seconds: float
        :keyword charged_cpu_memory_megabyte_seconds:
        :paramtype charged_cpu_memory_megabyte_seconds: float
        :keyword charged_gpu_seconds:
        :paramtype charged_gpu_seconds: float
        :keyword charged_node_utilization_seconds:
        :paramtype charged_node_utilization_seconds: float
        """
        super(JobCost, self).__init__(**kwargs)
        self.charged_cpu_core_seconds = kwargs.get('charged_cpu_core_seconds', None)
        self.charged_cpu_memory_megabyte_seconds = kwargs.get('charged_cpu_memory_megabyte_seconds', None)
        self.charged_gpu_seconds = kwargs.get('charged_gpu_seconds', None)
        self.charged_node_utilization_seconds = kwargs.get('charged_node_utilization_seconds', None)


class JobEndpoint(msrest.serialization.Model):
    """JobEndpoint.

    :ivar type:
    :vartype type: str
    :ivar port:
    :vartype port: int
    :ivar endpoint:
    :vartype endpoint: str
    :ivar status:
    :vartype status: str
    :ivar error_message:
    :vartype error_message: str
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar nodes:
    :vartype nodes: ~flow.models.MfeInternalNodes
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'port': {'key': 'port', 'type': 'int'},
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'error_message': {'key': 'errorMessage', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'nodes': {'key': 'nodes', 'type': 'MfeInternalNodes'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword type:
        :paramtype type: str
        :keyword port:
        :paramtype port: int
        :keyword endpoint:
        :paramtype endpoint: str
        :keyword status:
        :paramtype status: str
        :keyword error_message:
        :paramtype error_message: str
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword nodes:
        :paramtype nodes: ~flow.models.MfeInternalNodes
        """
        super(JobEndpoint, self).__init__(**kwargs)
        self.type = kwargs.get('type', None)
        self.port = kwargs.get('port', None)
        self.endpoint = kwargs.get('endpoint', None)
        self.status = kwargs.get('status', None)
        self.error_message = kwargs.get('error_message', None)
        self.properties = kwargs.get('properties', None)
        self.nodes = kwargs.get('nodes', None)


class JobInput(msrest.serialization.Model):
    """JobInput.

    All required parameters must be populated in order to send to Azure.

    :ivar job_input_type: Required. Possible values include: "Dataset", "Uri", "Literal",
     "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
    :vartype job_input_type: str or ~flow.models.JobInputType
    :ivar description:
    :vartype description: str
    """

    _validation = {
        'job_input_type': {'required': True},
    }

    _attribute_map = {
        'job_input_type': {'key': 'jobInputType', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword job_input_type: Required. Possible values include: "Dataset", "Uri", "Literal",
         "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:paramtype job_input_type: str or ~flow.models.JobInputType :keyword description: :paramtype description: str """ super(JobInput, self).__init__(**kwargs) self.job_input_type = kwargs['job_input_type'] self.description = kwargs.get('description', None) class JobOutput(msrest.serialization.Model): """JobOutput. All required parameters must be populated in order to send to Azure. :ivar job_output_type: Required. Possible values include: "Uri", "Dataset", "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel". :vartype job_output_type: str or ~flow.models.JobOutputType :ivar description: :vartype description: str :ivar auto_delete_setting: :vartype auto_delete_setting: ~flow.models.AutoDeleteSetting """ _validation = { 'job_output_type': {'required': True}, } _attribute_map = { 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, } def __init__( self, **kwargs ): """ :keyword job_output_type: Required. Possible values include: "Uri", "Dataset", "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel". :paramtype job_output_type: str or ~flow.models.JobOutputType :keyword description: :paramtype description: str :keyword auto_delete_setting: :paramtype auto_delete_setting: ~flow.models.AutoDeleteSetting """ super(JobOutput, self).__init__(**kwargs) self.job_output_type = kwargs['job_output_type'] self.description = kwargs.get('description', None) self.auto_delete_setting = kwargs.get('auto_delete_setting', None) class JobOutputArtifacts(msrest.serialization.Model): """JobOutputArtifacts. 

    :ivar datastore_id:
    :vartype datastore_id: str
    :ivar path:
    :vartype path: str
    """

    # msrest serialization map: python attribute -> wire key/type.
    _attribute_map = {
        'datastore_id': {'key': 'datastoreId', 'type': 'str'},
        'path': {'key': 'path', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword datastore_id:
        :paramtype datastore_id: str
        :keyword path:
        :paramtype path: str
        """
        super(JobOutputArtifacts, self).__init__(**kwargs)
        self.datastore_id = kwargs.get('datastore_id', None)
        self.path = kwargs.get('path', None)


class JobScheduleDto(msrest.serialization.Model):
    """JobScheduleDto.

    :ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
     "AutoML", "Spark", "Base".
    :vartype job_type: str or ~flow.models.JobType
    :ivar system_data:
    :vartype system_data: ~flow.models.SystemData
    :ivar name:
    :vartype name: str
    :ivar job_definition_id:
    :vartype job_definition_id: str
    :ivar display_name:
    :vartype display_name: str
    :ivar trigger_type: Possible values include: "Recurrence", "Cron".
    :vartype trigger_type: str or ~flow.models.TriggerType
    :ivar recurrence:
    :vartype recurrence: ~flow.models.Recurrence
    :ivar cron:
    :vartype cron: ~flow.models.Cron
    :ivar status: Possible values include: "Enabled", "Disabled".
    :vartype status: str or ~flow.models.ScheduleStatus
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    """

    _attribute_map = {
        'job_type': {'key': 'jobType', 'type': 'str'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'name': {'key': 'name', 'type': 'str'},
        'job_definition_id': {'key': 'jobDefinitionId', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'trigger_type': {'key': 'triggerType', 'type': 'str'},
        'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
        'cron': {'key': 'cron', 'type': 'Cron'},
        'status': {'key': 'status', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline",
         "Data", "AutoML", "Spark", "Base".
        :paramtype job_type: str or ~flow.models.JobType
        :keyword system_data:
        :paramtype system_data: ~flow.models.SystemData
        :keyword name:
        :paramtype name: str
        :keyword job_definition_id:
        :paramtype job_definition_id: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword trigger_type: Possible values include: "Recurrence", "Cron".
        :paramtype trigger_type: str or ~flow.models.TriggerType
        :keyword recurrence:
        :paramtype recurrence: ~flow.models.Recurrence
        :keyword cron:
        :paramtype cron: ~flow.models.Cron
        :keyword status: Possible values include: "Enabled", "Disabled".
        :paramtype status: str or ~flow.models.ScheduleStatus
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        """
        super(JobScheduleDto, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.job_type = kwargs.get('job_type', None)
        self.system_data = kwargs.get('system_data', None)
        self.name = kwargs.get('name', None)
        self.job_definition_id = kwargs.get('job_definition_id', None)
        self.display_name = kwargs.get('display_name', None)
        self.trigger_type = kwargs.get('trigger_type', None)
        self.recurrence = kwargs.get('recurrence', None)
        self.cron = kwargs.get('cron', None)
        self.status = kwargs.get('status', None)
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)


class K8SConfiguration(msrest.serialization.Model):
    """K8SConfiguration.

    :ivar max_retry_count:
    :vartype max_retry_count: int
    :ivar resource_configuration:
    :vartype resource_configuration: ~flow.models.ResourceConfig
    :ivar priority_configuration:
    :vartype priority_configuration: ~flow.models.PriorityConfig
    :ivar interactive_configuration:
    :vartype interactive_configuration: ~flow.models.InteractiveConfig
    """

    _attribute_map = {
        'max_retry_count': {'key': 'maxRetryCount', 'type': 'int'},
        'resource_configuration': {'key': 'resourceConfiguration', 'type': 'ResourceConfig'},
        'priority_configuration': {'key': 'priorityConfiguration', 'type': 'PriorityConfig'},
        'interactive_configuration': {'key': 'interactiveConfiguration', 'type': 'InteractiveConfig'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword max_retry_count:
        :paramtype max_retry_count: int
        :keyword resource_configuration:
        :paramtype resource_configuration: ~flow.models.ResourceConfig
        :keyword priority_configuration:
        :paramtype priority_configuration: ~flow.models.PriorityConfig
        :keyword interactive_configuration:
        :paramtype interactive_configuration: ~flow.models.InteractiveConfig
        """
        super(K8SConfiguration, self).__init__(**kwargs)
        self.max_retry_count = kwargs.get('max_retry_count', None)
        self.resource_configuration = kwargs.get('resource_configuration', None)
        self.priority_configuration = kwargs.get('priority_configuration', None)
        self.interactive_configuration = kwargs.get('interactive_configuration', None)


class KeyValuePairComponentNameMetaInfoErrorResponse(msrest.serialization.Model):
    """KeyValuePairComponentNameMetaInfoErrorResponse.

    :ivar key:
    :vartype key: ~flow.models.ComponentNameMetaInfo
    :ivar value: The error response.
    :vartype value: ~flow.models.ErrorResponse
    """

    # msrest serialization map: python attribute -> wire key/type.
    _attribute_map = {
        'key': {'key': 'key', 'type': 'ComponentNameMetaInfo'},
        'value': {'key': 'value', 'type': 'ErrorResponse'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword key:
        :paramtype key: ~flow.models.ComponentNameMetaInfo
        :keyword value: The error response.
        :paramtype value: ~flow.models.ErrorResponse
        """
        super(KeyValuePairComponentNameMetaInfoErrorResponse, self).__init__(**kwargs)
        self.key = kwargs.get('key', None)
        self.value = kwargs.get('value', None)


class KeyValuePairComponentNameMetaInfoModuleDto(msrest.serialization.Model):
    """KeyValuePairComponentNameMetaInfoModuleDto.

    :ivar key:
    :vartype key: ~flow.models.ComponentNameMetaInfo
    :ivar value:
    :vartype value: ~flow.models.ModuleDto
    """

    _attribute_map = {
        'key': {'key': 'key', 'type': 'ComponentNameMetaInfo'},
        'value': {'key': 'value', 'type': 'ModuleDto'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword key:
        :paramtype key: ~flow.models.ComponentNameMetaInfo
        :keyword value:
        :paramtype value: ~flow.models.ModuleDto
        """
        super(KeyValuePairComponentNameMetaInfoModuleDto, self).__init__(**kwargs)
        self.key = kwargs.get('key', None)
        self.value = kwargs.get('value', None)


class KeyValuePairStringObject(msrest.serialization.Model):
    """KeyValuePairStringObject.

    :ivar key:
    :vartype key: str
    :ivar value: Anything.
    :vartype value: any
    """

    _attribute_map = {
        'key': {'key': 'key', 'type': 'str'},
        'value': {'key': 'value', 'type': 'object'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword key:
        :paramtype key: str
        :keyword value: Anything.
        :paramtype value: any
        """
        super(KeyValuePairStringObject, self).__init__(**kwargs)
        self.key = kwargs.get('key', None)
        self.value = kwargs.get('value', None)


class KubernetesConfiguration(msrest.serialization.Model):
    """KubernetesConfiguration.

    :ivar instance_type:
    :vartype instance_type: str
    """

    _attribute_map = {
        'instance_type': {'key': 'instanceType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword instance_type:
        :paramtype instance_type: str
        """
        super(KubernetesConfiguration, self).__init__(**kwargs)
        self.instance_type = kwargs.get('instance_type', None)


class Kwarg(msrest.serialization.Model):
    """Kwarg.

    :ivar key:
    :vartype key: str
    :ivar value:
    :vartype value: str
    """

    _attribute_map = {
        'key': {'key': 'key', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword key:
        :paramtype key: str
        :keyword value:
        :paramtype value: str
        """
        super(Kwarg, self).__init__(**kwargs)
        self.key = kwargs.get('key', None)
        self.value = kwargs.get('value', None)


class LegacyDataPath(msrest.serialization.Model):
    """LegacyDataPath.

    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
    :ivar relative_path:
    :vartype relative_path: str
    """

    _attribute_map = {
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
        :keyword relative_path:
        :paramtype relative_path: str
        """
        super(LegacyDataPath, self).__init__(**kwargs)
        self.data_store_name = kwargs.get('data_store_name', None)
        self.data_store_mode = kwargs.get('data_store_mode', None)
        self.relative_path = kwargs.get('relative_path', None)


class LimitSettings(msrest.serialization.Model):
    """LimitSettings.

    :ivar max_trials:
    :vartype max_trials: int
    :ivar timeout:
    :vartype timeout: str
    :ivar trial_timeout:
    :vartype trial_timeout: str
    :ivar max_concurrent_trials:
    :vartype max_concurrent_trials: int
    :ivar max_cores_per_trial:
    :vartype max_cores_per_trial: int
    :ivar exit_score:
    :vartype exit_score: float
    :ivar enable_early_termination:
    :vartype enable_early_termination: bool
    :ivar max_nodes:
    :vartype max_nodes: int
    """

    # msrest serialization map: python attribute -> wire key/type.
    _attribute_map = {
        'max_trials': {'key': 'maxTrials', 'type': 'int'},
        'timeout': {'key': 'timeout', 'type': 'str'},
        'trial_timeout': {'key': 'trialTimeout', 'type': 'str'},
        'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
        'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'},
        'exit_score': {'key': 'exitScore', 'type': 'float'},
        'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'},
        'max_nodes': {'key': 'maxNodes', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword max_trials:
        :paramtype max_trials: int
        :keyword timeout:
        :paramtype timeout: str
        :keyword trial_timeout:
        :paramtype trial_timeout: str
        :keyword max_concurrent_trials:
        :paramtype max_concurrent_trials: int
        :keyword max_cores_per_trial:
        :paramtype max_cores_per_trial: int
        :keyword exit_score:
        :paramtype exit_score: float
        :keyword enable_early_termination:
        :paramtype enable_early_termination: bool
        :keyword max_nodes:
        :paramtype max_nodes: int
        """
        super(LimitSettings, self).__init__(**kwargs)
        self.max_trials = kwargs.get('max_trials', None)
        self.timeout = kwargs.get('timeout', None)
        self.trial_timeout = kwargs.get('trial_timeout', None)
        self.max_concurrent_trials = kwargs.get('max_concurrent_trials', None)
        self.max_cores_per_trial = kwargs.get('max_cores_per_trial', None)
        self.exit_score = kwargs.get('exit_score', None)
        self.enable_early_termination = kwargs.get('enable_early_termination', None)
        self.max_nodes = kwargs.get('max_nodes', None)


class LinkedADBWorkspaceMetadata(msrest.serialization.Model):
    """LinkedADBWorkspaceMetadata.

    :ivar workspace_id:
    :vartype workspace_id: str
    :ivar region:
    :vartype region: str
    """

    _attribute_map = {
        'workspace_id': {'key': 'workspaceId', 'type': 'str'},
        'region': {'key': 'region', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword workspace_id:
        :paramtype workspace_id: str
        :keyword region:
        :paramtype region: str
        """
        super(LinkedADBWorkspaceMetadata, self).__init__(**kwargs)
        self.workspace_id = kwargs.get('workspace_id', None)
        self.region = kwargs.get('region', None)


class LinkedPipelineInfo(msrest.serialization.Model):
    """LinkedPipelineInfo.

    :ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
     "BatchInferencePipeline", "Unknown".
    :vartype pipeline_type: str or ~flow.models.PipelineType
    :ivar module_node_id:
    :vartype module_node_id: str
    :ivar port_name:
    :vartype port_name: str
    :ivar linked_pipeline_draft_id:
    :vartype linked_pipeline_draft_id: str
    :ivar linked_pipeline_run_id:
    :vartype linked_pipeline_run_id: str
    :ivar is_direct_link:
    :vartype is_direct_link: bool
    """

    # msrest serialization map: python attribute -> wire key/type.
    _attribute_map = {
        'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
        'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
        'port_name': {'key': 'portName', 'type': 'str'},
        'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
        'linked_pipeline_run_id': {'key': 'linkedPipelineRunId', 'type': 'str'},
        'is_direct_link': {'key': 'isDirectLink', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword pipeline_type: Possible values include: "TrainingPipeline",
         "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
        :paramtype pipeline_type: str or ~flow.models.PipelineType
        :keyword module_node_id:
        :paramtype module_node_id: str
        :keyword port_name:
        :paramtype port_name: str
        :keyword linked_pipeline_draft_id:
        :paramtype linked_pipeline_draft_id: str
        :keyword linked_pipeline_run_id:
        :paramtype linked_pipeline_run_id: str
        :keyword is_direct_link:
        :paramtype is_direct_link: bool
        """
        super(LinkedPipelineInfo, self).__init__(**kwargs)
        self.pipeline_type = kwargs.get('pipeline_type', None)
        self.module_node_id = kwargs.get('module_node_id', None)
        self.port_name = kwargs.get('port_name', None)
        self.linked_pipeline_draft_id = kwargs.get('linked_pipeline_draft_id', None)
        self.linked_pipeline_run_id = kwargs.get('linked_pipeline_run_id', None)
        self.is_direct_link = kwargs.get('is_direct_link', None)


class LoadFlowAsComponentRequest(msrest.serialization.Model):
    """LoadFlowAsComponentRequest.

    :ivar component_name:
    :vartype component_name: str
    :ivar component_version:
    :vartype component_version: str
    :ivar display_name:
    :vartype display_name: str
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar is_deterministic:
    :vartype is_deterministic: bool
    :ivar flow_definition_file_path:
    :vartype flow_definition_file_path: str
    :ivar flow_definition_resource_id:
    :vartype flow_definition_resource_id: str
    :ivar flow_definition_data_store_name:
    :vartype flow_definition_data_store_name: str
    :ivar flow_definition_blob_path:
    :vartype flow_definition_blob_path: str
    :ivar flow_definition_data_uri:
    :vartype flow_definition_data_uri: str
    :ivar node_variant:
    :vartype node_variant: str
    :ivar inputs_mapping: This is a dictionary.
    :vartype inputs_mapping: dict[str, str]
    :ivar connections: This is a dictionary.
    :vartype connections: dict[str, dict[str, str]]
    :ivar environment_variables: This is a dictionary.
    :vartype environment_variables: dict[str, str]
    :ivar runtime_name:
    :vartype runtime_name: str
    :ivar session_id:
    :vartype session_id: str
    :ivar vm_size:
    :vartype vm_size: str
    :ivar max_idle_time_seconds:
    :vartype max_idle_time_seconds: long
    """

    _attribute_map = {
        'component_name': {'key': 'componentName', 'type': 'str'},
        'component_version': {'key': 'componentVersion', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
        'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
        'flow_definition_resource_id': {'key': 'flowDefinitionResourceId', 'type': 'str'},
        'flow_definition_data_store_name': {'key': 'flowDefinitionDataStoreName', 'type': 'str'},
        'flow_definition_blob_path': {'key': 'flowDefinitionBlobPath', 'type': 'str'},
        'flow_definition_data_uri': {'key': 'flowDefinitionDataUri', 'type': 'str'},
        'node_variant': {'key': 'nodeVariant', 'type': 'str'},
        'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
        'connections': {'key': 'connections', 'type': '{{str}}'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'runtime_name': {'key': 'runtimeName', 'type': 'str'},
        'session_id': {'key': 'sessionId', 'type': 'str'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword component_name:
        :paramtype component_name: str
        :keyword component_version:
        :paramtype component_version: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword is_deterministic:
        :paramtype is_deterministic: bool
        :keyword flow_definition_file_path:
        :paramtype flow_definition_file_path: str
        :keyword flow_definition_resource_id:
        :paramtype flow_definition_resource_id: str
        :keyword flow_definition_data_store_name:
        :paramtype flow_definition_data_store_name: str
        :keyword flow_definition_blob_path:
        :paramtype flow_definition_blob_path: str
        :keyword flow_definition_data_uri:
        :paramtype flow_definition_data_uri: str
        :keyword node_variant:
        :paramtype node_variant: str
        :keyword inputs_mapping: This is a dictionary.
        :paramtype inputs_mapping: dict[str, str]
        :keyword connections: This is a dictionary.
        :paramtype connections: dict[str, dict[str, str]]
        :keyword environment_variables: This is a dictionary.
        :paramtype environment_variables: dict[str, str]
        :keyword runtime_name:
        :paramtype runtime_name: str
        :keyword session_id:
        :paramtype session_id: str
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword max_idle_time_seconds:
        :paramtype max_idle_time_seconds: long
        """
        super(LoadFlowAsComponentRequest, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.component_name = kwargs.get('component_name', None)
        self.component_version = kwargs.get('component_version', None)
        self.display_name = kwargs.get('display_name', None)
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)
        self.is_deterministic = kwargs.get('is_deterministic', None)
        self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
        self.flow_definition_resource_id = kwargs.get('flow_definition_resource_id', None)
        self.flow_definition_data_store_name = kwargs.get('flow_definition_data_store_name', None)
        self.flow_definition_blob_path = kwargs.get('flow_definition_blob_path', None)
        self.flow_definition_data_uri = kwargs.get('flow_definition_data_uri', None)
        self.node_variant = kwargs.get('node_variant', None)
        self.inputs_mapping = kwargs.get('inputs_mapping', None)
        self.connections = kwargs.get('connections', None)
        self.environment_variables = kwargs.get('environment_variables', None)
        self.runtime_name = kwargs.get('runtime_name', None)
        self.session_id = kwargs.get('session_id', None)
        self.vm_size = kwargs.get('vm_size', None)
        self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)


class LogRunTerminatedEventDto(msrest.serialization.Model):
    """LogRunTerminatedEventDto.

    :ivar next_action_interval_in_seconds:
    :vartype next_action_interval_in_seconds: int
    :ivar action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
     "SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
    :vartype action_type: str or ~flow.models.ActionType
    :ivar last_checked_time:
    :vartype last_checked_time: ~datetime.datetime
    """

    _attribute_map = {
        'next_action_interval_in_seconds': {'key': 'nextActionIntervalInSeconds', 'type': 'int'},
        'action_type': {'key': 'actionType', 'type': 'str'},
        'last_checked_time': {'key': 'lastCheckedTime', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword next_action_interval_in_seconds:
        :paramtype next_action_interval_in_seconds: int
        :keyword action_type: Possible values include: "SendValidationRequest",
         "GetValidationStatus", "SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
        :paramtype action_type: str or ~flow.models.ActionType
        :keyword last_checked_time:
        :paramtype last_checked_time: ~datetime.datetime
        """
        super(LogRunTerminatedEventDto, self).__init__(**kwargs)
        self.next_action_interval_in_seconds = kwargs.get('next_action_interval_in_seconds', None)
        self.action_type = kwargs.get('action_type', None)
        self.last_checked_time = kwargs.get('last_checked_time', None)


class LongRunningOperationUriResponse(msrest.serialization.Model):
    """LongRunningOperationUriResponse.

    :ivar location:
    :vartype location: str
    :ivar operation_result:
    :vartype operation_result: str
    """

    # msrest serialization map: python attribute -> wire key/type.
    _attribute_map = {
        'location': {'key': 'location', 'type': 'str'},
        'operation_result': {'key': 'operationResult', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword location:
        :paramtype location: str
        :keyword operation_result:
        :paramtype operation_result: str
        """
        super(LongRunningOperationUriResponse, self).__init__(**kwargs)
        self.location = kwargs.get('location', None)
        self.operation_result = kwargs.get('operation_result', None)


class LongRunningUpdateRegistryComponentRequest(msrest.serialization.Model):
    """LongRunningUpdateRegistryComponentRequest.

    :ivar display_name:
    :vartype display_name: str
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar registry_name:
    :vartype registry_name: str
    :ivar component_name:
    :vartype component_name: str
    :ivar component_version:
    :vartype component_version: str
    :ivar update_type: Possible values include: "EnableModule", "DisableModule",
     "UpdateDisplayName", "UpdateDescription", "UpdateTags".
    :vartype update_type: str or ~flow.models.LongRunningUpdateType
    """

    _attribute_map = {
        'display_name': {'key': 'displayName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'registry_name': {'key': 'registryName', 'type': 'str'},
        'component_name': {'key': 'componentName', 'type': 'str'},
        'component_version': {'key': 'componentVersion', 'type': 'str'},
        'update_type': {'key': 'updateType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword display_name:
        :paramtype display_name: str
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword registry_name:
        :paramtype registry_name: str
        :keyword component_name:
        :paramtype component_name: str
        :keyword component_version:
        :paramtype component_version: str
        :keyword update_type: Possible values include: "EnableModule", "DisableModule",
         "UpdateDisplayName", "UpdateDescription", "UpdateTags".
        :paramtype update_type: str or ~flow.models.LongRunningUpdateType
        """
        super(LongRunningUpdateRegistryComponentRequest, self).__init__(**kwargs)
        self.display_name = kwargs.get('display_name', None)
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.registry_name = kwargs.get('registry_name', None)
        self.component_name = kwargs.get('component_name', None)
        self.component_version = kwargs.get('component_version', None)
        self.update_type = kwargs.get('update_type', None)


class ManagedServiceIdentity(msrest.serialization.Model):
    """ManagedServiceIdentity.

    All required parameters must be populated in order to send to Azure.

    :ivar type: Required. Possible values include: "SystemAssigned", "UserAssigned",
     "SystemAssignedUserAssigned", "None".
    :vartype type: str or ~flow.models.ManagedServiceIdentityType
    :ivar principal_id:
    :vartype principal_id: str
    :ivar tenant_id:
    :vartype tenant_id: str
    :ivar user_assigned_identities: Dictionary of :code:`<UserAssignedIdentity>`.
    :vartype user_assigned_identities: dict[str, ~flow.models.UserAssignedIdentity]
    """

    # Required-field markers (msrest convention).
    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'principal_id': {'key': 'principalId', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword type: Required. Possible values include: "SystemAssigned", "UserAssigned",
         "SystemAssignedUserAssigned", "None".
        :paramtype type: str or ~flow.models.ManagedServiceIdentityType
        :keyword principal_id:
        :paramtype principal_id: str
        :keyword tenant_id:
        :paramtype tenant_id: str
        :keyword user_assigned_identities: Dictionary of :code:`<UserAssignedIdentity>`.
        :paramtype user_assigned_identities: dict[str, ~flow.models.UserAssignedIdentity]
        """
        super(ManagedServiceIdentity, self).__init__(**kwargs)
        self.type = kwargs['type']  # required: raises KeyError if omitted
        self.principal_id = kwargs.get('principal_id', None)
        self.tenant_id = kwargs.get('tenant_id', None)
        self.user_assigned_identities = kwargs.get('user_assigned_identities', None)


class MavenLibraryDto(msrest.serialization.Model):
    """MavenLibraryDto.

    :ivar coordinates:
    :vartype coordinates: str
    :ivar repo:
    :vartype repo: str
    :ivar exclusions:
    :vartype exclusions: list[str]
    """

    _attribute_map = {
        'coordinates': {'key': 'coordinates', 'type': 'str'},
        'repo': {'key': 'repo', 'type': 'str'},
        'exclusions': {'key': 'exclusions', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword coordinates:
        :paramtype coordinates: str
        :keyword repo:
        :paramtype repo: str
        :keyword exclusions:
        :paramtype exclusions: list[str]
        """
        super(MavenLibraryDto, self).__init__(**kwargs)
        self.coordinates = kwargs.get('coordinates', None)
        self.repo = kwargs.get('repo', None)
        self.exclusions = kwargs.get('exclusions', None)


class MetricProperties(msrest.serialization.Model):
    """MetricProperties.

    :ivar ux_metric_type:
    :vartype ux_metric_type: str
    """

    _attribute_map = {
        'ux_metric_type': {'key': 'uxMetricType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword ux_metric_type:
        :paramtype ux_metric_type: str
        """
        super(MetricProperties, self).__init__(**kwargs)
        self.ux_metric_type = kwargs.get('ux_metric_type', None)


class MetricSchemaDto(msrest.serialization.Model):
    """MetricSchemaDto.

    :ivar num_properties:
    :vartype num_properties: int
    :ivar properties:
    :vartype properties: list[~flow.models.MetricSchemaPropertyDto]
    """

    _attribute_map = {
        'num_properties': {'key': 'numProperties', 'type': 'int'},
        'properties': {'key': 'properties', 'type': '[MetricSchemaPropertyDto]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword num_properties:
        :paramtype num_properties: int
        :keyword properties:
        :paramtype properties: list[~flow.models.MetricSchemaPropertyDto]
        """
        super(MetricSchemaDto, self).__init__(**kwargs)
        self.num_properties = kwargs.get('num_properties', None)
        self.properties = kwargs.get('properties', None)


class MetricSchemaPropertyDto(msrest.serialization.Model):
    """MetricSchemaPropertyDto.

    :ivar property_id:
    :vartype property_id: str
    :ivar name:
    :vartype name: str
    :ivar type:
    :vartype type: str
    """

    _attribute_map = {
        'property_id': {'key': 'propertyId', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword property_id:
        :paramtype property_id: str
        :keyword name:
        :paramtype name: str
        :keyword type:
        :paramtype type: str
        """
        super(MetricSchemaPropertyDto, self).__init__(**kwargs)
        self.property_id = kwargs.get('property_id', None)
        self.name = kwargs.get('name', None)
        self.type = kwargs.get('type', None)


class MetricV2Dto(msrest.serialization.Model):
    """MetricV2Dto.

    :ivar data_container_id:
    :vartype data_container_id: str
    :ivar name:
    :vartype name: str
    :ivar columns: This is a dictionary.
    :vartype columns: dict[str, str or ~flow.models.MetricValueType]
    :ivar properties:
    :vartype properties: ~flow.models.MetricProperties
    :ivar namespace:
    :vartype namespace: str
    :ivar standard_schema_id:
    :vartype standard_schema_id: str
    :ivar value:
    :vartype value: list[~flow.models.MetricV2Value]
    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken. If null,
     there are no additional pages.
    :vartype next_link: str
    """

    # msrest serialization map: python attribute -> wire key/type.
    _attribute_map = {
        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'columns': {'key': 'columns', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': 'MetricProperties'},
        'namespace': {'key': 'namespace', 'type': 'str'},
        'standard_schema_id': {'key': 'standardSchemaId', 'type': 'str'},
        'value': {'key': 'value', 'type': '[MetricV2Value]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_container_id:
        :paramtype data_container_id: str
        :keyword name:
        :paramtype name: str
        :keyword columns: This is a dictionary.
        :paramtype columns: dict[str, str or ~flow.models.MetricValueType]
        :keyword properties:
        :paramtype properties: ~flow.models.MetricProperties
        :keyword namespace:
        :paramtype namespace: str
        :keyword standard_schema_id:
        :paramtype standard_schema_id: str
        :keyword value:
        :paramtype value: list[~flow.models.MetricV2Value]
        :keyword continuation_token: The token used in retrieving the next page. If null, there
         are no additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken. If
         null, there are no additional pages.
        :paramtype next_link: str
        """
        super(MetricV2Dto, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.data_container_id = kwargs.get('data_container_id', None)
        self.name = kwargs.get('name', None)
        self.columns = kwargs.get('columns', None)
        self.properties = kwargs.get('properties', None)
        self.namespace = kwargs.get('namespace', None)
        self.standard_schema_id = kwargs.get('standard_schema_id', None)
        self.value = kwargs.get('value', None)
        self.continuation_token = kwargs.get('continuation_token', None)
        self.next_link = kwargs.get('next_link', None)


class MetricV2Value(msrest.serialization.Model):
    """MetricV2Value.

    :ivar metric_id:
    :vartype metric_id: str
    :ivar created_utc:
    :vartype created_utc: ~datetime.datetime
    :ivar step:
    :vartype step: long
    :ivar data: Dictionary of :code:`<any>`.
    :vartype data: dict[str, any]
    :ivar sas_uri:
    :vartype sas_uri: str
    """

    _attribute_map = {
        'metric_id': {'key': 'metricId', 'type': 'str'},
        'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
        'step': {'key': 'step', 'type': 'long'},
        'data': {'key': 'data', 'type': '{object}'},
        'sas_uri': {'key': 'sasUri', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword metric_id:
        :paramtype metric_id: str
        :keyword created_utc:
        :paramtype created_utc: ~datetime.datetime
        :keyword step:
        :paramtype step: long
        :keyword data: Dictionary of :code:`<any>`.
        :paramtype data: dict[str, any]
        :keyword sas_uri:
        :paramtype sas_uri: str
        """
        super(MetricV2Value, self).__init__(**kwargs)
        self.metric_id = kwargs.get('metric_id', None)
        self.created_utc = kwargs.get('created_utc', None)
        self.step = kwargs.get('step', None)
        self.data = kwargs.get('data', None)
        self.sas_uri = kwargs.get('sas_uri', None)


class MfeInternalAutologgerSettings(msrest.serialization.Model):
    """MfeInternalAutologgerSettings.

    :ivar mlflow_autologger: Possible values include: "Enabled", "Disabled".
:vartype mlflow_autologger: str or ~flow.models.MfeInternalMLFlowAutologgerState """ _attribute_map = { 'mlflow_autologger': {'key': 'mlflowAutologger', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword mlflow_autologger: Possible values include: "Enabled", "Disabled". :paramtype mlflow_autologger: str or ~flow.models.MfeInternalMLFlowAutologgerState """ super(MfeInternalAutologgerSettings, self).__init__(**kwargs) self.mlflow_autologger = kwargs.get('mlflow_autologger', None) class MfeInternalIdentityConfiguration(msrest.serialization.Model): """MfeInternalIdentityConfiguration. :ivar identity_type: Possible values include: "Managed", "AMLToken", "UserIdentity". :vartype identity_type: str or ~flow.models.MfeInternalIdentityType """ _attribute_map = { 'identity_type': {'key': 'identityType', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword identity_type: Possible values include: "Managed", "AMLToken", "UserIdentity". :paramtype identity_type: str or ~flow.models.MfeInternalIdentityType """ super(MfeInternalIdentityConfiguration, self).__init__(**kwargs) self.identity_type = kwargs.get('identity_type', None) class MfeInternalNodes(msrest.serialization.Model): """MfeInternalNodes. :ivar nodes_value_type: The only acceptable values to pass in are None and "All". The default value is None. :vartype nodes_value_type: str """ _attribute_map = { 'nodes_value_type': {'key': 'nodesValueType', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword nodes_value_type: The only acceptable values to pass in are None and "All". The default value is None. :paramtype nodes_value_type: str """ super(MfeInternalNodes, self).__init__(**kwargs) self.nodes_value_type = kwargs.get('nodes_value_type', None) class MfeInternalOutputData(msrest.serialization.Model): """MfeInternalOutputData. 
    :ivar dataset_name:
    :vartype dataset_name: str
    :ivar datastore:
    :vartype datastore: str
    :ivar datapath:
    :vartype datapath: str
    :ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
     "ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
    :vartype mode: str or ~flow.models.DataBindingMode
    """

    # Maps Python attribute name -> REST wire key and msrest type string.
    _attribute_map = {
        'dataset_name': {'key': 'datasetName', 'type': 'str'},
        'datastore': {'key': 'datastore', 'type': 'str'},
        'datapath': {'key': 'datapath', 'type': 'str'},
        'mode': {'key': 'mode', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword dataset_name:
        :paramtype dataset_name: str
        :keyword datastore:
        :paramtype datastore: str
        :keyword datapath:
        :paramtype datapath: str
        :keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
         "ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
        :paramtype mode: str or ~flow.models.DataBindingMode
        """
        super(MfeInternalOutputData, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.dataset_name = kwargs.get('dataset_name', None)
        self.datastore = kwargs.get('datastore', None)
        self.datapath = kwargs.get('datapath', None)
        self.mode = kwargs.get('mode', None)


# Auto-generated msrest model. All attributes are optional and default to None.
class MfeInternalSecretConfiguration(msrest.serialization.Model):
    """MfeInternalSecretConfiguration.

    :ivar workspace_secret_name:
    :vartype workspace_secret_name: str
    :ivar uri:
    :vartype uri: str
    """

    _attribute_map = {
        'workspace_secret_name': {'key': 'workspaceSecretName', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword workspace_secret_name:
        :paramtype workspace_secret_name: str
        :keyword uri:
        :paramtype uri: str
        """
        super(MfeInternalSecretConfiguration, self).__init__(**kwargs)
        self.workspace_secret_name = kwargs.get('workspace_secret_name', None)
        self.uri = kwargs.get('uri', None)


class MfeInternalUriReference(msrest.serialization.Model):
    """MfeInternalUriReference.
    :ivar file:
    :vartype file: str
    :ivar folder:
    :vartype folder: str
    """

    # Maps Python attribute name -> REST wire key and msrest type string.
    _attribute_map = {
        'file': {'key': 'file', 'type': 'str'},
        'folder': {'key': 'folder', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword file:
        :paramtype file: str
        :keyword folder:
        :paramtype folder: str
        """
        super(MfeInternalUriReference, self).__init__(**kwargs)
        self.file = kwargs.get('file', None)
        self.folder = kwargs.get('folder', None)


# Auto-generated msrest model. All attributes are optional and default to None.
class MfeInternalV20211001ComponentJob(msrest.serialization.Model):
    """MfeInternalV20211001ComponentJob.

    :ivar compute_id:
    :vartype compute_id: str
    :ivar component_id:
    :vartype component_id: str
    :ivar inputs: This is a dictionary.
    :vartype inputs: dict[str, ~flow.models.JobInput]
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, ~flow.models.JobOutput]
    :ivar overrides: Anything.
    :vartype overrides: any
    """

    _attribute_map = {
        'compute_id': {'key': 'computeId', 'type': 'str'},
        'component_id': {'key': 'componentId', 'type': 'str'},
        'inputs': {'key': 'inputs', 'type': '{JobInput}'},
        'outputs': {'key': 'outputs', 'type': '{JobOutput}'},
        'overrides': {'key': 'overrides', 'type': 'object'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword compute_id:
        :paramtype compute_id: str
        :keyword component_id:
        :paramtype component_id: str
        :keyword inputs: This is a dictionary.
        :paramtype inputs: dict[str, ~flow.models.JobInput]
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, ~flow.models.JobOutput]
        :keyword overrides: Anything.
        :paramtype overrides: any
        """
        super(MfeInternalV20211001ComponentJob, self).__init__(**kwargs)
        self.compute_id = kwargs.get('compute_id', None)
        self.component_id = kwargs.get('component_id', None)
        self.inputs = kwargs.get('inputs', None)
        self.outputs = kwargs.get('outputs', None)
        self.overrides = kwargs.get('overrides', None)


class MinMaxParameterRule(msrest.serialization.Model):
    """MinMaxParameterRule.
    :ivar min:
    :vartype min: float
    :ivar max:
    :vartype max: float
    """

    # Maps Python attribute name -> REST wire key and msrest type string.
    _attribute_map = {
        'min': {'key': 'min', 'type': 'float'},
        'max': {'key': 'max', 'type': 'float'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword min:
        :paramtype min: float
        :keyword max:
        :paramtype max: float
        """
        super(MinMaxParameterRule, self).__init__(**kwargs)
        self.min = kwargs.get('min', None)
        self.max = kwargs.get('max', None)


# Auto-generated msrest model. All attributes are optional and default to None.
class MlcComputeInfo(msrest.serialization.Model):
    """MlcComputeInfo.

    :ivar mlc_compute_type:
    :vartype mlc_compute_type: str
    """

    _attribute_map = {
        'mlc_compute_type': {'key': 'mlcComputeType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword mlc_compute_type:
        :paramtype mlc_compute_type: str
        """
        super(MlcComputeInfo, self).__init__(**kwargs)
        self.mlc_compute_type = kwargs.get('mlc_compute_type', None)


# Auto-generated msrest model. All attributes are optional and default to None.
class ModelDto(msrest.serialization.Model):
    """ModelDto.

    :ivar feed_name:
    :vartype feed_name: str
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar id:
    :vartype id: str
    :ivar version:
    :vartype version: str
    :ivar system_data:
    :vartype system_data: ~flow.models.SystemData
    :ivar arm_id:
    :vartype arm_id: str
    :ivar online_endpoint_yaml_str:
    :vartype online_endpoint_yaml_str: str
    """

    # Maps Python attribute name -> REST wire key and msrest type string.
    _attribute_map = {
        'feed_name': {'key': 'feedName', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'arm_id': {'key': 'armId', 'type': 'str'},
        'online_endpoint_yaml_str': {'key': 'onlineEndpointYamlStr', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword feed_name:
        :paramtype feed_name: str
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword id:
        :paramtype id: str
        :keyword version:
        :paramtype version: str
        :keyword system_data:
        :paramtype system_data: ~flow.models.SystemData
        :keyword arm_id:
        :paramtype arm_id: str
        :keyword online_endpoint_yaml_str:
        :paramtype online_endpoint_yaml_str: str
        """
        super(ModelDto, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.feed_name = kwargs.get('feed_name', None)
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
        self.relative_path = kwargs.get('relative_path', None)
        self.id = kwargs.get('id', None)
        self.version = kwargs.get('version', None)
        self.system_data = kwargs.get('system_data', None)
        self.arm_id = kwargs.get('arm_id', None)
        self.online_endpoint_yaml_str = kwargs.get('online_endpoint_yaml_str', None)


# Auto-generated msrest model. All attributes are optional and default to None.
class ModelManagementErrorResponse(msrest.serialization.Model):
    """ModelManagementErrorResponse.

    :ivar code:
    :vartype code: str
    :ivar status_code:
    :vartype status_code: int
    :ivar message:
    :vartype message: str
    :ivar target:
    :vartype target: str
    :ivar details:
    :vartype details: list[~flow.models.InnerErrorDetails]
    :ivar correlation: Dictionary of :code:`<string>`.
    :vartype correlation: dict[str, str]
    """

    # Maps Python attribute name -> REST wire key and msrest type string.
    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'status_code': {'key': 'statusCode', 'type': 'int'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'details': {'key': 'details', 'type': '[InnerErrorDetails]'},
        'correlation': {'key': 'correlation', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword code:
        :paramtype code: str
        :keyword status_code:
        :paramtype status_code: int
        :keyword message:
        :paramtype message: str
        :keyword target:
        :paramtype target: str
        :keyword details:
        :paramtype details: list[~flow.models.InnerErrorDetails]
        :keyword correlation: Dictionary of :code:`<string>`.
        :paramtype correlation: dict[str, str]
        """
        super(ModelManagementErrorResponse, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.code = kwargs.get('code', None)
        self.status_code = kwargs.get('status_code', None)
        self.message = kwargs.get('message', None)
        self.target = kwargs.get('target', None)
        self.details = kwargs.get('details', None)
        self.correlation = kwargs.get('correlation', None)


# Auto-generated msrest model. All attributes are optional and default to None.
class ModifyPipelineJobScheduleDto(msrest.serialization.Model):
    """ModifyPipelineJobScheduleDto.

    :ivar pipeline_job_name:
    :vartype pipeline_job_name: str
    :ivar pipeline_job_runtime_settings:
    :vartype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
    :ivar display_name:
    :vartype display_name: str
    :ivar trigger_type: Possible values include: "Recurrence", "Cron".
    :vartype trigger_type: str or ~flow.models.TriggerType
    :ivar recurrence:
    :vartype recurrence: ~flow.models.Recurrence
    :ivar cron:
    :vartype cron: ~flow.models.Cron
    :ivar status: Possible values include: "Enabled", "Disabled".
    :vartype status: str or ~flow.models.ScheduleStatus
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    """

    # Maps Python attribute name -> REST wire key and msrest type string.
    _attribute_map = {
        'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
        'pipeline_job_runtime_settings': {'key': 'pipelineJobRuntimeSettings', 'type': 'PipelineJobRuntimeBasicSettings'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'trigger_type': {'key': 'triggerType', 'type': 'str'},
        'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
        'cron': {'key': 'cron', 'type': 'Cron'},
        'status': {'key': 'status', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword pipeline_job_name:
        :paramtype pipeline_job_name: str
        :keyword pipeline_job_runtime_settings:
        :paramtype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
        :keyword display_name:
        :paramtype display_name: str
        :keyword trigger_type: Possible values include: "Recurrence", "Cron".
        :paramtype trigger_type: str or ~flow.models.TriggerType
        :keyword recurrence:
        :paramtype recurrence: ~flow.models.Recurrence
        :keyword cron:
        :paramtype cron: ~flow.models.Cron
        :keyword status: Possible values include: "Enabled", "Disabled".
        :paramtype status: str or ~flow.models.ScheduleStatus
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
:paramtype properties: dict[str, str] """ super(ModifyPipelineJobScheduleDto, self).__init__(**kwargs) self.pipeline_job_name = kwargs.get('pipeline_job_name', None) self.pipeline_job_runtime_settings = kwargs.get('pipeline_job_runtime_settings', None) self.display_name = kwargs.get('display_name', None) self.trigger_type = kwargs.get('trigger_type', None) self.recurrence = kwargs.get('recurrence', None) self.cron = kwargs.get('cron', None) self.status = kwargs.get('status', None) self.description = kwargs.get('description', None) self.tags = kwargs.get('tags', None) self.properties = kwargs.get('properties', None) class ModuleDto(msrest.serialization.Model): """ModuleDto. :ivar namespace: :vartype namespace: str :ivar tags: A set of tags. :vartype tags: list[str] :ivar display_name: :vartype display_name: str :ivar dict_tags: Dictionary of :code:`<string>`. :vartype dict_tags: dict[str, str] :ivar module_version_id: :vartype module_version_id: str :ivar feed_name: :vartype feed_name: str :ivar registry_name: :vartype registry_name: str :ivar module_name: :vartype module_name: str :ivar module_version: :vartype module_version: str :ivar description: :vartype description: str :ivar owner: :vartype owner: str :ivar job_type: :vartype job_type: str :ivar default_version: :vartype default_version: str :ivar family_id: :vartype family_id: str :ivar help_document: :vartype help_document: str :ivar codegen_by: :vartype codegen_by: str :ivar arm_id: :vartype arm_id: str :ivar module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous", "Step", "Draft", "Feed", "Registry", "SystemAutoCreated". :vartype module_scope: str or ~flow.models.ModuleScope :ivar module_entity: :vartype module_entity: ~flow.models.ModuleEntity :ivar input_types: :vartype input_types: list[str] :ivar output_types: :vartype output_types: list[str] :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled". 
:vartype entity_status: str or ~flow.models.EntityStatus :ivar created_date: :vartype created_date: ~datetime.datetime :ivar last_modified_date: :vartype last_modified_date: ~datetime.datetime :ivar yaml_link: :vartype yaml_link: str :ivar yaml_link_with_commit_sha: :vartype yaml_link_with_commit_sha: str :ivar module_source_type: Possible values include: "Unknown", "Local", "GithubFile", "GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo". :vartype module_source_type: str or ~flow.models.ModuleSourceType :ivar registered_by: :vartype registered_by: str :ivar versions: :vartype versions: list[~flow.models.AzureMLModuleVersionDescriptor] :ivar is_default_module_version: :vartype is_default_module_version: bool :ivar system_data: :vartype system_data: ~flow.models.SystemData :ivar system_meta: :vartype system_meta: ~flow.models.SystemMeta :ivar snapshot_id: :vartype snapshot_id: str :ivar entry: :vartype entry: str :ivar os_type: :vartype os_type: str :ivar require_gpu: :vartype require_gpu: bool :ivar module_python_interface: :vartype module_python_interface: ~flow.models.ModulePythonInterface :ivar environment_asset_id: :vartype environment_asset_id: str :ivar run_setting_parameters: :vartype run_setting_parameters: list[~flow.models.RunSettingParameter] :ivar supported_ui_input_data_delivery_modes: Dictionary of <componentsΒ·9qwi7eΒ·schemasΒ·moduledtoΒ·propertiesΒ·supporteduiinputdatadeliverymodesΒ·additionalproperties>. :vartype supported_ui_input_data_delivery_modes: dict[str, list[str or ~flow.models.UIInputDataDeliveryMode]] :ivar output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`. 
:vartype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec] :ivar yaml_str: :vartype yaml_str: str """ _attribute_map = { 'namespace': {'key': 'namespace', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '[str]'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'dict_tags': {'key': 'dictTags', 'type': '{str}'}, 'module_version_id': {'key': 'moduleVersionId', 'type': 'str'}, 'feed_name': {'key': 'feedName', 'type': 'str'}, 'registry_name': {'key': 'registryName', 'type': 'str'}, 'module_name': {'key': 'moduleName', 'type': 'str'}, 'module_version': {'key': 'moduleVersion', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'owner': {'key': 'owner', 'type': 'str'}, 'job_type': {'key': 'jobType', 'type': 'str'}, 'default_version': {'key': 'defaultVersion', 'type': 'str'}, 'family_id': {'key': 'familyId', 'type': 'str'}, 'help_document': {'key': 'helpDocument', 'type': 'str'}, 'codegen_by': {'key': 'codegenBy', 'type': 'str'}, 'arm_id': {'key': 'armId', 'type': 'str'}, 'module_scope': {'key': 'moduleScope', 'type': 'str'}, 'module_entity': {'key': 'moduleEntity', 'type': 'ModuleEntity'}, 'input_types': {'key': 'inputTypes', 'type': '[str]'}, 'output_types': {'key': 'outputTypes', 'type': '[str]'}, 'entity_status': {'key': 'entityStatus', 'type': 'str'}, 'created_date': {'key': 'createdDate', 'type': 'iso-8601'}, 'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'}, 'yaml_link': {'key': 'yamlLink', 'type': 'str'}, 'yaml_link_with_commit_sha': {'key': 'yamlLinkWithCommitSha', 'type': 'str'}, 'module_source_type': {'key': 'moduleSourceType', 'type': 'str'}, 'registered_by': {'key': 'registeredBy', 'type': 'str'}, 'versions': {'key': 'versions', 'type': '[AzureMLModuleVersionDescriptor]'}, 'is_default_module_version': {'key': 'isDefaultModuleVersion', 'type': 'bool'}, 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'system_meta': {'key': 'systemMeta', 'type': 'SystemMeta'}, 'snapshot_id': {'key': 
'snapshotId', 'type': 'str'}, 'entry': {'key': 'entry', 'type': 'str'}, 'os_type': {'key': 'osType', 'type': 'str'}, 'require_gpu': {'key': 'requireGpu', 'type': 'bool'}, 'module_python_interface': {'key': 'modulePythonInterface', 'type': 'ModulePythonInterface'}, 'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'}, 'run_setting_parameters': {'key': 'runSettingParameters', 'type': '[RunSettingParameter]'}, 'supported_ui_input_data_delivery_modes': {'key': 'supportedUIInputDataDeliveryModes', 'type': '{[str]}'}, 'output_setting_specs': {'key': 'outputSettingSpecs', 'type': '{OutputSettingSpec}'}, 'yaml_str': {'key': 'yamlStr', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword namespace: :paramtype namespace: str :keyword tags: A set of tags. :paramtype tags: list[str] :keyword display_name: :paramtype display_name: str :keyword dict_tags: Dictionary of :code:`<string>`. :paramtype dict_tags: dict[str, str] :keyword module_version_id: :paramtype module_version_id: str :keyword feed_name: :paramtype feed_name: str :keyword registry_name: :paramtype registry_name: str :keyword module_name: :paramtype module_name: str :keyword module_version: :paramtype module_version: str :keyword description: :paramtype description: str :keyword owner: :paramtype owner: str :keyword job_type: :paramtype job_type: str :keyword default_version: :paramtype default_version: str :keyword family_id: :paramtype family_id: str :keyword help_document: :paramtype help_document: str :keyword codegen_by: :paramtype codegen_by: str :keyword arm_id: :paramtype arm_id: str :keyword module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous", "Step", "Draft", "Feed", "Registry", "SystemAutoCreated". 
:paramtype module_scope: str or ~flow.models.ModuleScope :keyword module_entity: :paramtype module_entity: ~flow.models.ModuleEntity :keyword input_types: :paramtype input_types: list[str] :keyword output_types: :paramtype output_types: list[str] :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled". :paramtype entity_status: str or ~flow.models.EntityStatus :keyword created_date: :paramtype created_date: ~datetime.datetime :keyword last_modified_date: :paramtype last_modified_date: ~datetime.datetime :keyword yaml_link: :paramtype yaml_link: str :keyword yaml_link_with_commit_sha: :paramtype yaml_link_with_commit_sha: str :keyword module_source_type: Possible values include: "Unknown", "Local", "GithubFile", "GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo". :paramtype module_source_type: str or ~flow.models.ModuleSourceType :keyword registered_by: :paramtype registered_by: str :keyword versions: :paramtype versions: list[~flow.models.AzureMLModuleVersionDescriptor] :keyword is_default_module_version: :paramtype is_default_module_version: bool :keyword system_data: :paramtype system_data: ~flow.models.SystemData :keyword system_meta: :paramtype system_meta: ~flow.models.SystemMeta :keyword snapshot_id: :paramtype snapshot_id: str :keyword entry: :paramtype entry: str :keyword os_type: :paramtype os_type: str :keyword require_gpu: :paramtype require_gpu: bool :keyword module_python_interface: :paramtype module_python_interface: ~flow.models.ModulePythonInterface :keyword environment_asset_id: :paramtype environment_asset_id: str :keyword run_setting_parameters: :paramtype run_setting_parameters: list[~flow.models.RunSettingParameter] :keyword supported_ui_input_data_delivery_modes: Dictionary of <componentsΒ·9qwi7eΒ·schemasΒ·moduledtoΒ·propertiesΒ·supporteduiinputdatadeliverymodesΒ·additionalproperties>. 
:paramtype supported_ui_input_data_delivery_modes: dict[str, list[str or ~flow.models.UIInputDataDeliveryMode]] :keyword output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`. :paramtype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec] :keyword yaml_str: :paramtype yaml_str: str """ super(ModuleDto, self).__init__(**kwargs) self.namespace = kwargs.get('namespace', None) self.tags = kwargs.get('tags', None) self.display_name = kwargs.get('display_name', None) self.dict_tags = kwargs.get('dict_tags', None) self.module_version_id = kwargs.get('module_version_id', None) self.feed_name = kwargs.get('feed_name', None) self.registry_name = kwargs.get('registry_name', None) self.module_name = kwargs.get('module_name', None) self.module_version = kwargs.get('module_version', None) self.description = kwargs.get('description', None) self.owner = kwargs.get('owner', None) self.job_type = kwargs.get('job_type', None) self.default_version = kwargs.get('default_version', None) self.family_id = kwargs.get('family_id', None) self.help_document = kwargs.get('help_document', None) self.codegen_by = kwargs.get('codegen_by', None) self.arm_id = kwargs.get('arm_id', None) self.module_scope = kwargs.get('module_scope', None) self.module_entity = kwargs.get('module_entity', None) self.input_types = kwargs.get('input_types', None) self.output_types = kwargs.get('output_types', None) self.entity_status = kwargs.get('entity_status', None) self.created_date = kwargs.get('created_date', None) self.last_modified_date = kwargs.get('last_modified_date', None) self.yaml_link = kwargs.get('yaml_link', None) self.yaml_link_with_commit_sha = kwargs.get('yaml_link_with_commit_sha', None) self.module_source_type = kwargs.get('module_source_type', None) self.registered_by = kwargs.get('registered_by', None) self.versions = kwargs.get('versions', None) self.is_default_module_version = kwargs.get('is_default_module_version', None) self.system_data = kwargs.get('system_data', None) 
self.system_meta = kwargs.get('system_meta', None) self.snapshot_id = kwargs.get('snapshot_id', None) self.entry = kwargs.get('entry', None) self.os_type = kwargs.get('os_type', None) self.require_gpu = kwargs.get('require_gpu', None) self.module_python_interface = kwargs.get('module_python_interface', None) self.environment_asset_id = kwargs.get('environment_asset_id', None) self.run_setting_parameters = kwargs.get('run_setting_parameters', None) self.supported_ui_input_data_delivery_modes = kwargs.get('supported_ui_input_data_delivery_modes', None) self.output_setting_specs = kwargs.get('output_setting_specs', None) self.yaml_str = kwargs.get('yaml_str', None) class ModuleDtoWithErrors(msrest.serialization.Model): """ModuleDtoWithErrors. :ivar version_id_to_module_dto: This is a dictionary. :vartype version_id_to_module_dto: dict[str, ~flow.models.ModuleDto] :ivar name_and_version_to_module_dto: :vartype name_and_version_to_module_dto: list[~flow.models.KeyValuePairComponentNameMetaInfoModuleDto] :ivar version_id_to_error: This is a dictionary. :vartype version_id_to_error: dict[str, ~flow.models.ErrorResponse] :ivar name_and_version_to_error: :vartype name_and_version_to_error: list[~flow.models.KeyValuePairComponentNameMetaInfoErrorResponse] """ _attribute_map = { 'version_id_to_module_dto': {'key': 'versionIdToModuleDto', 'type': '{ModuleDto}'}, 'name_and_version_to_module_dto': {'key': 'nameAndVersionToModuleDto', 'type': '[KeyValuePairComponentNameMetaInfoModuleDto]'}, 'version_id_to_error': {'key': 'versionIdToError', 'type': '{ErrorResponse}'}, 'name_and_version_to_error': {'key': 'nameAndVersionToError', 'type': '[KeyValuePairComponentNameMetaInfoErrorResponse]'}, } def __init__( self, **kwargs ): """ :keyword version_id_to_module_dto: This is a dictionary. 
:paramtype version_id_to_module_dto: dict[str, ~flow.models.ModuleDto] :keyword name_and_version_to_module_dto: :paramtype name_and_version_to_module_dto: list[~flow.models.KeyValuePairComponentNameMetaInfoModuleDto] :keyword version_id_to_error: This is a dictionary. :paramtype version_id_to_error: dict[str, ~flow.models.ErrorResponse] :keyword name_and_version_to_error: :paramtype name_and_version_to_error: list[~flow.models.KeyValuePairComponentNameMetaInfoErrorResponse] """ super(ModuleDtoWithErrors, self).__init__(**kwargs) self.version_id_to_module_dto = kwargs.get('version_id_to_module_dto', None) self.name_and_version_to_module_dto = kwargs.get('name_and_version_to_module_dto', None) self.version_id_to_error = kwargs.get('version_id_to_error', None) self.name_and_version_to_error = kwargs.get('name_and_version_to_error', None) class ModuleDtoWithValidateStatus(msrest.serialization.Model): """ModuleDtoWithValidateStatus. :ivar existing_module_entity: :vartype existing_module_entity: ~flow.models.ModuleEntity :ivar status: Possible values include: "NewModule", "NewVersion", "Conflict", "ParseError", "ProcessRequestError". :vartype status: str or ~flow.models.ModuleInfoFromYamlStatusEnum :ivar status_details: :vartype status_details: str :ivar error_details: :vartype error_details: list[str] :ivar serialized_module_info: :vartype serialized_module_info: str :ivar namespace: :vartype namespace: str :ivar tags: A set of tags. :vartype tags: list[str] :ivar display_name: :vartype display_name: str :ivar dict_tags: Dictionary of :code:`<string>`. 
:vartype dict_tags: dict[str, str] :ivar module_version_id: :vartype module_version_id: str :ivar feed_name: :vartype feed_name: str :ivar registry_name: :vartype registry_name: str :ivar module_name: :vartype module_name: str :ivar module_version: :vartype module_version: str :ivar description: :vartype description: str :ivar owner: :vartype owner: str :ivar job_type: :vartype job_type: str :ivar default_version: :vartype default_version: str :ivar family_id: :vartype family_id: str :ivar help_document: :vartype help_document: str :ivar codegen_by: :vartype codegen_by: str :ivar arm_id: :vartype arm_id: str :ivar module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous", "Step", "Draft", "Feed", "Registry", "SystemAutoCreated". :vartype module_scope: str or ~flow.models.ModuleScope :ivar module_entity: :vartype module_entity: ~flow.models.ModuleEntity :ivar input_types: :vartype input_types: list[str] :ivar output_types: :vartype output_types: list[str] :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled". :vartype entity_status: str or ~flow.models.EntityStatus :ivar created_date: :vartype created_date: ~datetime.datetime :ivar last_modified_date: :vartype last_modified_date: ~datetime.datetime :ivar yaml_link: :vartype yaml_link: str :ivar yaml_link_with_commit_sha: :vartype yaml_link_with_commit_sha: str :ivar module_source_type: Possible values include: "Unknown", "Local", "GithubFile", "GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo". 
:vartype module_source_type: str or ~flow.models.ModuleSourceType :ivar registered_by: :vartype registered_by: str :ivar versions: :vartype versions: list[~flow.models.AzureMLModuleVersionDescriptor] :ivar is_default_module_version: :vartype is_default_module_version: bool :ivar system_data: :vartype system_data: ~flow.models.SystemData :ivar system_meta: :vartype system_meta: ~flow.models.SystemMeta :ivar snapshot_id: :vartype snapshot_id: str :ivar entry: :vartype entry: str :ivar os_type: :vartype os_type: str :ivar require_gpu: :vartype require_gpu: bool :ivar module_python_interface: :vartype module_python_interface: ~flow.models.ModulePythonInterface :ivar environment_asset_id: :vartype environment_asset_id: str :ivar run_setting_parameters: :vartype run_setting_parameters: list[~flow.models.RunSettingParameter] :ivar supported_ui_input_data_delivery_modes: Dictionary of <componentsΒ·8o5zajΒ·schemasΒ·moduledtowithvalidatestatusΒ·propertiesΒ·supporteduiinputdatadeliverymodesΒ·additionalproperties>. :vartype supported_ui_input_data_delivery_modes: dict[str, list[str or ~flow.models.UIInputDataDeliveryMode]] :ivar output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`. 
:vartype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec] :ivar yaml_str: :vartype yaml_str: str """ _attribute_map = { 'existing_module_entity': {'key': 'existingModuleEntity', 'type': 'ModuleEntity'}, 'status': {'key': 'status', 'type': 'str'}, 'status_details': {'key': 'statusDetails', 'type': 'str'}, 'error_details': {'key': 'errorDetails', 'type': '[str]'}, 'serialized_module_info': {'key': 'serializedModuleInfo', 'type': 'str'}, 'namespace': {'key': 'namespace', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '[str]'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'dict_tags': {'key': 'dictTags', 'type': '{str}'}, 'module_version_id': {'key': 'moduleVersionId', 'type': 'str'}, 'feed_name': {'key': 'feedName', 'type': 'str'}, 'registry_name': {'key': 'registryName', 'type': 'str'}, 'module_name': {'key': 'moduleName', 'type': 'str'}, 'module_version': {'key': 'moduleVersion', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'owner': {'key': 'owner', 'type': 'str'}, 'job_type': {'key': 'jobType', 'type': 'str'}, 'default_version': {'key': 'defaultVersion', 'type': 'str'}, 'family_id': {'key': 'familyId', 'type': 'str'}, 'help_document': {'key': 'helpDocument', 'type': 'str'}, 'codegen_by': {'key': 'codegenBy', 'type': 'str'}, 'arm_id': {'key': 'armId', 'type': 'str'}, 'module_scope': {'key': 'moduleScope', 'type': 'str'}, 'module_entity': {'key': 'moduleEntity', 'type': 'ModuleEntity'}, 'input_types': {'key': 'inputTypes', 'type': '[str]'}, 'output_types': {'key': 'outputTypes', 'type': '[str]'}, 'entity_status': {'key': 'entityStatus', 'type': 'str'}, 'created_date': {'key': 'createdDate', 'type': 'iso-8601'}, 'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'}, 'yaml_link': {'key': 'yamlLink', 'type': 'str'}, 'yaml_link_with_commit_sha': {'key': 'yamlLinkWithCommitSha', 'type': 'str'}, 'module_source_type': {'key': 'moduleSourceType', 'type': 'str'}, 'registered_by': {'key': 'registeredBy', 
'type': 'str'}, 'versions': {'key': 'versions', 'type': '[AzureMLModuleVersionDescriptor]'}, 'is_default_module_version': {'key': 'isDefaultModuleVersion', 'type': 'bool'}, 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'system_meta': {'key': 'systemMeta', 'type': 'SystemMeta'}, 'snapshot_id': {'key': 'snapshotId', 'type': 'str'}, 'entry': {'key': 'entry', 'type': 'str'}, 'os_type': {'key': 'osType', 'type': 'str'}, 'require_gpu': {'key': 'requireGpu', 'type': 'bool'}, 'module_python_interface': {'key': 'modulePythonInterface', 'type': 'ModulePythonInterface'}, 'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'}, 'run_setting_parameters': {'key': 'runSettingParameters', 'type': '[RunSettingParameter]'}, 'supported_ui_input_data_delivery_modes': {'key': 'supportedUIInputDataDeliveryModes', 'type': '{[str]}'}, 'output_setting_specs': {'key': 'outputSettingSpecs', 'type': '{OutputSettingSpec}'}, 'yaml_str': {'key': 'yamlStr', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword existing_module_entity: :paramtype existing_module_entity: ~flow.models.ModuleEntity :keyword status: Possible values include: "NewModule", "NewVersion", "Conflict", "ParseError", "ProcessRequestError". :paramtype status: str or ~flow.models.ModuleInfoFromYamlStatusEnum :keyword status_details: :paramtype status_details: str :keyword error_details: :paramtype error_details: list[str] :keyword serialized_module_info: :paramtype serialized_module_info: str :keyword namespace: :paramtype namespace: str :keyword tags: A set of tags. :paramtype tags: list[str] :keyword display_name: :paramtype display_name: str :keyword dict_tags: Dictionary of :code:`<string>`. 
:paramtype dict_tags: dict[str, str] :keyword module_version_id: :paramtype module_version_id: str :keyword feed_name: :paramtype feed_name: str :keyword registry_name: :paramtype registry_name: str :keyword module_name: :paramtype module_name: str :keyword module_version: :paramtype module_version: str :keyword description: :paramtype description: str :keyword owner: :paramtype owner: str :keyword job_type: :paramtype job_type: str :keyword default_version: :paramtype default_version: str :keyword family_id: :paramtype family_id: str :keyword help_document: :paramtype help_document: str :keyword codegen_by: :paramtype codegen_by: str :keyword arm_id: :paramtype arm_id: str :keyword module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous", "Step", "Draft", "Feed", "Registry", "SystemAutoCreated". :paramtype module_scope: str or ~flow.models.ModuleScope :keyword module_entity: :paramtype module_entity: ~flow.models.ModuleEntity :keyword input_types: :paramtype input_types: list[str] :keyword output_types: :paramtype output_types: list[str] :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled". :paramtype entity_status: str or ~flow.models.EntityStatus :keyword created_date: :paramtype created_date: ~datetime.datetime :keyword last_modified_date: :paramtype last_modified_date: ~datetime.datetime :keyword yaml_link: :paramtype yaml_link: str :keyword yaml_link_with_commit_sha: :paramtype yaml_link_with_commit_sha: str :keyword module_source_type: Possible values include: "Unknown", "Local", "GithubFile", "GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo". 
:paramtype module_source_type: str or ~flow.models.ModuleSourceType :keyword registered_by: :paramtype registered_by: str :keyword versions: :paramtype versions: list[~flow.models.AzureMLModuleVersionDescriptor] :keyword is_default_module_version: :paramtype is_default_module_version: bool :keyword system_data: :paramtype system_data: ~flow.models.SystemData :keyword system_meta: :paramtype system_meta: ~flow.models.SystemMeta :keyword snapshot_id: :paramtype snapshot_id: str :keyword entry: :paramtype entry: str :keyword os_type: :paramtype os_type: str :keyword require_gpu: :paramtype require_gpu: bool :keyword module_python_interface: :paramtype module_python_interface: ~flow.models.ModulePythonInterface :keyword environment_asset_id: :paramtype environment_asset_id: str :keyword run_setting_parameters: :paramtype run_setting_parameters: list[~flow.models.RunSettingParameter] :keyword supported_ui_input_data_delivery_modes: Dictionary of <componentsΒ·8o5zajΒ·schemasΒ·moduledtowithvalidatestatusΒ·propertiesΒ·supporteduiinputdatadeliverymodesΒ·additionalproperties>. :paramtype supported_ui_input_data_delivery_modes: dict[str, list[str or ~flow.models.UIInputDataDeliveryMode]] :keyword output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`. 
:paramtype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec] :keyword yaml_str: :paramtype yaml_str: str """ super(ModuleDtoWithValidateStatus, self).__init__(**kwargs) self.existing_module_entity = kwargs.get('existing_module_entity', None) self.status = kwargs.get('status', None) self.status_details = kwargs.get('status_details', None) self.error_details = kwargs.get('error_details', None) self.serialized_module_info = kwargs.get('serialized_module_info', None) self.namespace = kwargs.get('namespace', None) self.tags = kwargs.get('tags', None) self.display_name = kwargs.get('display_name', None) self.dict_tags = kwargs.get('dict_tags', None) self.module_version_id = kwargs.get('module_version_id', None) self.feed_name = kwargs.get('feed_name', None) self.registry_name = kwargs.get('registry_name', None) self.module_name = kwargs.get('module_name', None) self.module_version = kwargs.get('module_version', None) self.description = kwargs.get('description', None) self.owner = kwargs.get('owner', None) self.job_type = kwargs.get('job_type', None) self.default_version = kwargs.get('default_version', None) self.family_id = kwargs.get('family_id', None) self.help_document = kwargs.get('help_document', None) self.codegen_by = kwargs.get('codegen_by', None) self.arm_id = kwargs.get('arm_id', None) self.module_scope = kwargs.get('module_scope', None) self.module_entity = kwargs.get('module_entity', None) self.input_types = kwargs.get('input_types', None) self.output_types = kwargs.get('output_types', None) self.entity_status = kwargs.get('entity_status', None) self.created_date = kwargs.get('created_date', None) self.last_modified_date = kwargs.get('last_modified_date', None) self.yaml_link = kwargs.get('yaml_link', None) self.yaml_link_with_commit_sha = kwargs.get('yaml_link_with_commit_sha', None) self.module_source_type = kwargs.get('module_source_type', None) self.registered_by = kwargs.get('registered_by', None) self.versions = kwargs.get('versions', 
None) self.is_default_module_version = kwargs.get('is_default_module_version', None) self.system_data = kwargs.get('system_data', None) self.system_meta = kwargs.get('system_meta', None) self.snapshot_id = kwargs.get('snapshot_id', None) self.entry = kwargs.get('entry', None) self.os_type = kwargs.get('os_type', None) self.require_gpu = kwargs.get('require_gpu', None) self.module_python_interface = kwargs.get('module_python_interface', None) self.environment_asset_id = kwargs.get('environment_asset_id', None) self.run_setting_parameters = kwargs.get('run_setting_parameters', None) self.supported_ui_input_data_delivery_modes = kwargs.get('supported_ui_input_data_delivery_modes', None) self.output_setting_specs = kwargs.get('output_setting_specs', None) self.yaml_str = kwargs.get('yaml_str', None) class ModuleEntity(msrest.serialization.Model): """ModuleEntity. :ivar display_name: :vartype display_name: str :ivar module_execution_type: :vartype module_execution_type: str :ivar module_type: Possible values include: "None", "BatchInferencing". :vartype module_type: str or ~flow.models.ModuleType :ivar module_type_version: :vartype module_type_version: str :ivar upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed". :vartype upload_state: str or ~flow.models.UploadState :ivar is_deterministic: :vartype is_deterministic: bool :ivar structured_interface: :vartype structured_interface: ~flow.models.StructuredInterface :ivar data_location: :vartype data_location: ~flow.models.DataLocation :ivar identifier_hash: :vartype identifier_hash: str :ivar identifier_hash_v2: :vartype identifier_hash_v2: str :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar properties: This is a dictionary. 
:vartype properties: dict[str, str] :ivar created_by: :vartype created_by: ~flow.models.CreatedBy :ivar last_updated_by: :vartype last_updated_by: ~flow.models.CreatedBy :ivar runconfig: :vartype runconfig: str :ivar cloud_settings: :vartype cloud_settings: ~flow.models.CloudSettings :ivar category: :vartype category: str :ivar step_type: :vartype step_type: str :ivar stage: :vartype stage: str :ivar name: :vartype name: str :ivar hash: :vartype hash: str :ivar description: :vartype description: str :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled". :vartype entity_status: str or ~flow.models.EntityStatus :ivar id: :vartype id: str :ivar etag: :vartype etag: str :ivar created_date: :vartype created_date: ~datetime.datetime :ivar last_modified_date: :vartype last_modified_date: ~datetime.datetime """ _attribute_map = { 'display_name': {'key': 'displayName', 'type': 'str'}, 'module_execution_type': {'key': 'moduleExecutionType', 'type': 'str'}, 'module_type': {'key': 'moduleType', 'type': 'str'}, 'module_type_version': {'key': 'moduleTypeVersion', 'type': 'str'}, 'upload_state': {'key': 'uploadState', 'type': 'str'}, 'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'}, 'structured_interface': {'key': 'structuredInterface', 'type': 'StructuredInterface'}, 'data_location': {'key': 'dataLocation', 'type': 'DataLocation'}, 'identifier_hash': {'key': 'identifierHash', 'type': 'str'}, 'identifier_hash_v2': {'key': 'identifierHashV2', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'created_by': {'key': 'createdBy', 'type': 'CreatedBy'}, 'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'CreatedBy'}, 'runconfig': {'key': 'runconfig', 'type': 'str'}, 'cloud_settings': {'key': 'cloudSettings', 'type': 'CloudSettings'}, 'category': {'key': 'category', 'type': 'str'}, 'step_type': {'key': 'stepType', 'type': 'str'}, 'stage': {'key': 'stage', 'type': 'str'}, 'name': 
{'key': 'name', 'type': 'str'}, 'hash': {'key': 'hash', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'entity_status': {'key': 'entityStatus', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'created_date': {'key': 'createdDate', 'type': 'iso-8601'}, 'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'}, } def __init__( self, **kwargs ): """ :keyword display_name: :paramtype display_name: str :keyword module_execution_type: :paramtype module_execution_type: str :keyword module_type: Possible values include: "None", "BatchInferencing". :paramtype module_type: str or ~flow.models.ModuleType :keyword module_type_version: :paramtype module_type_version: str :keyword upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed". :paramtype upload_state: str or ~flow.models.UploadState :keyword is_deterministic: :paramtype is_deterministic: bool :keyword structured_interface: :paramtype structured_interface: ~flow.models.StructuredInterface :keyword data_location: :paramtype data_location: ~flow.models.DataLocation :keyword identifier_hash: :paramtype identifier_hash: str :keyword identifier_hash_v2: :paramtype identifier_hash_v2: str :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword properties: This is a dictionary. 
:paramtype properties: dict[str, str] :keyword created_by: :paramtype created_by: ~flow.models.CreatedBy :keyword last_updated_by: :paramtype last_updated_by: ~flow.models.CreatedBy :keyword runconfig: :paramtype runconfig: str :keyword cloud_settings: :paramtype cloud_settings: ~flow.models.CloudSettings :keyword category: :paramtype category: str :keyword step_type: :paramtype step_type: str :keyword stage: :paramtype stage: str :keyword name: :paramtype name: str :keyword hash: :paramtype hash: str :keyword description: :paramtype description: str :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled". :paramtype entity_status: str or ~flow.models.EntityStatus :keyword id: :paramtype id: str :keyword etag: :paramtype etag: str :keyword created_date: :paramtype created_date: ~datetime.datetime :keyword last_modified_date: :paramtype last_modified_date: ~datetime.datetime """ super(ModuleEntity, self).__init__(**kwargs) self.display_name = kwargs.get('display_name', None) self.module_execution_type = kwargs.get('module_execution_type', None) self.module_type = kwargs.get('module_type', None) self.module_type_version = kwargs.get('module_type_version', None) self.upload_state = kwargs.get('upload_state', None) self.is_deterministic = kwargs.get('is_deterministic', None) self.structured_interface = kwargs.get('structured_interface', None) self.data_location = kwargs.get('data_location', None) self.identifier_hash = kwargs.get('identifier_hash', None) self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None) self.tags = kwargs.get('tags', None) self.properties = kwargs.get('properties', None) self.created_by = kwargs.get('created_by', None) self.last_updated_by = kwargs.get('last_updated_by', None) self.runconfig = kwargs.get('runconfig', None) self.cloud_settings = kwargs.get('cloud_settings', None) self.category = kwargs.get('category', None) self.step_type = kwargs.get('step_type', None) self.stage = kwargs.get('stage', None) 
self.name = kwargs.get('name', None) self.hash = kwargs.get('hash', None) self.description = kwargs.get('description', None) self.entity_status = kwargs.get('entity_status', None) self.id = kwargs.get('id', None) self.etag = kwargs.get('etag', None) self.created_date = kwargs.get('created_date', None) self.last_modified_date = kwargs.get('last_modified_date', None) class ModulePythonInterface(msrest.serialization.Model): """ModulePythonInterface. :ivar inputs: :vartype inputs: list[~flow.models.PythonInterfaceMapping] :ivar outputs: :vartype outputs: list[~flow.models.PythonInterfaceMapping] :ivar parameters: :vartype parameters: list[~flow.models.PythonInterfaceMapping] """ _attribute_map = { 'inputs': {'key': 'inputs', 'type': '[PythonInterfaceMapping]'}, 'outputs': {'key': 'outputs', 'type': '[PythonInterfaceMapping]'}, 'parameters': {'key': 'parameters', 'type': '[PythonInterfaceMapping]'}, } def __init__( self, **kwargs ): """ :keyword inputs: :paramtype inputs: list[~flow.models.PythonInterfaceMapping] :keyword outputs: :paramtype outputs: list[~flow.models.PythonInterfaceMapping] :keyword parameters: :paramtype parameters: list[~flow.models.PythonInterfaceMapping] """ super(ModulePythonInterface, self).__init__(**kwargs) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.parameters = kwargs.get('parameters', None) class MpiConfiguration(msrest.serialization.Model): """MpiConfiguration. :ivar process_count_per_node: :vartype process_count_per_node: int """ _attribute_map = { 'process_count_per_node': {'key': 'processCountPerNode', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword process_count_per_node: :paramtype process_count_per_node: int """ super(MpiConfiguration, self).__init__(**kwargs) self.process_count_per_node = kwargs.get('process_count_per_node', None) class NCrossValidations(msrest.serialization.Model): """NCrossValidations. :ivar mode: Possible values include: "Auto", "Custom". 
:vartype mode: str or ~flow.models.NCrossValidationMode :ivar value: :vartype value: int """ _attribute_map = { 'mode': {'key': 'mode', 'type': 'str'}, 'value': {'key': 'value', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword mode: Possible values include: "Auto", "Custom". :paramtype mode: str or ~flow.models.NCrossValidationMode :keyword value: :paramtype value: int """ super(NCrossValidations, self).__init__(**kwargs) self.mode = kwargs.get('mode', None) self.value = kwargs.get('value', None) class Node(msrest.serialization.Model): """Node. :ivar name: :vartype name: str :ivar type: Possible values include: "llm", "python", "action", "prompt", "custom_llm", "csharp". :vartype type: str or ~flow.models.ToolType :ivar source: :vartype source: ~flow.models.NodeSource :ivar inputs: Dictionary of :code:`<any>`. :vartype inputs: dict[str, any] :ivar tool: :vartype tool: str :ivar reduce: :vartype reduce: bool :ivar activate: :vartype activate: ~flow.models.Activate :ivar comment: :vartype comment: str :ivar api: :vartype api: str :ivar provider: :vartype provider: str :ivar connection: :vartype connection: str :ivar module: :vartype module: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'source': {'key': 'source', 'type': 'NodeSource'}, 'inputs': {'key': 'inputs', 'type': '{object}'}, 'tool': {'key': 'tool', 'type': 'str'}, 'reduce': {'key': 'reduce', 'type': 'bool'}, 'activate': {'key': 'activate', 'type': 'Activate'}, 'comment': {'key': 'comment', 'type': 'str'}, 'api': {'key': 'api', 'type': 'str'}, 'provider': {'key': 'provider', 'type': 'str'}, 'connection': {'key': 'connection', 'type': 'str'}, 'module': {'key': 'module', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword type: Possible values include: "llm", "python", "action", "prompt", "custom_llm", "csharp". 
:paramtype type: str or ~flow.models.ToolType :keyword source: :paramtype source: ~flow.models.NodeSource :keyword inputs: Dictionary of :code:`<any>`. :paramtype inputs: dict[str, any] :keyword tool: :paramtype tool: str :keyword reduce: :paramtype reduce: bool :keyword activate: :paramtype activate: ~flow.models.Activate :keyword comment: :paramtype comment: str :keyword api: :paramtype api: str :keyword provider: :paramtype provider: str :keyword connection: :paramtype connection: str :keyword module: :paramtype module: str """ super(Node, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.type = kwargs.get('type', None) self.source = kwargs.get('source', None) self.inputs = kwargs.get('inputs', None) self.tool = kwargs.get('tool', None) self.reduce = kwargs.get('reduce', None) self.activate = kwargs.get('activate', None) self.comment = kwargs.get('comment', None) self.api = kwargs.get('api', None) self.provider = kwargs.get('provider', None) self.connection = kwargs.get('connection', None) self.module = kwargs.get('module', None) class NodeInputPort(msrest.serialization.Model): """NodeInputPort. 
:ivar name: :vartype name: str :ivar documentation: :vartype documentation: str :ivar data_types_ids: :vartype data_types_ids: list[str] :ivar is_optional: :vartype is_optional: bool """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'documentation': {'key': 'documentation', 'type': 'str'}, 'data_types_ids': {'key': 'dataTypesIds', 'type': '[str]'}, 'is_optional': {'key': 'isOptional', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword documentation: :paramtype documentation: str :keyword data_types_ids: :paramtype data_types_ids: list[str] :keyword is_optional: :paramtype is_optional: bool """ super(NodeInputPort, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.documentation = kwargs.get('documentation', None) self.data_types_ids = kwargs.get('data_types_ids', None) self.is_optional = kwargs.get('is_optional', None) class NodeLayout(msrest.serialization.Model): """NodeLayout. :ivar x: :vartype x: float :ivar y: :vartype y: float :ivar width: :vartype width: float :ivar height: :vartype height: float :ivar extended_data: :vartype extended_data: str """ _attribute_map = { 'x': {'key': 'x', 'type': 'float'}, 'y': {'key': 'y', 'type': 'float'}, 'width': {'key': 'width', 'type': 'float'}, 'height': {'key': 'height', 'type': 'float'}, 'extended_data': {'key': 'extendedData', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword x: :paramtype x: float :keyword y: :paramtype y: float :keyword width: :paramtype width: float :keyword height: :paramtype height: float :keyword extended_data: :paramtype extended_data: str """ super(NodeLayout, self).__init__(**kwargs) self.x = kwargs.get('x', None) self.y = kwargs.get('y', None) self.width = kwargs.get('width', None) self.height = kwargs.get('height', None) self.extended_data = kwargs.get('extended_data', None) class NodeOutputPort(msrest.serialization.Model): """NodeOutputPort. 
:ivar name: :vartype name: str :ivar documentation: :vartype documentation: str :ivar data_type_id: :vartype data_type_id: str :ivar pass_through_input_name: :vartype pass_through_input_name: str :ivar early_available: :vartype early_available: bool :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". :vartype data_store_mode: str or ~flow.models.AEVADataStoreMode """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'documentation': {'key': 'documentation', 'type': 'str'}, 'data_type_id': {'key': 'dataTypeId', 'type': 'str'}, 'pass_through_input_name': {'key': 'passThroughInputName', 'type': 'str'}, 'early_available': {'key': 'EarlyAvailable', 'type': 'bool'}, 'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword documentation: :paramtype documentation: str :keyword data_type_id: :paramtype data_type_id: str :keyword pass_through_input_name: :paramtype pass_through_input_name: str :keyword early_available: :paramtype early_available: bool :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". :paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode """ super(NodeOutputPort, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.documentation = kwargs.get('documentation', None) self.data_type_id = kwargs.get('data_type_id', None) self.pass_through_input_name = kwargs.get('pass_through_input_name', None) self.early_available = kwargs.get('early_available', None) self.data_store_mode = kwargs.get('data_store_mode', None) class NodePortInterface(msrest.serialization.Model): """NodePortInterface. 
:ivar inputs: :vartype inputs: list[~flow.models.NodeInputPort] :ivar outputs: :vartype outputs: list[~flow.models.NodeOutputPort] :ivar control_outputs: :vartype control_outputs: list[~flow.models.ControlOutput] """ _attribute_map = { 'inputs': {'key': 'inputs', 'type': '[NodeInputPort]'}, 'outputs': {'key': 'outputs', 'type': '[NodeOutputPort]'}, 'control_outputs': {'key': 'controlOutputs', 'type': '[ControlOutput]'}, } def __init__( self, **kwargs ): """ :keyword inputs: :paramtype inputs: list[~flow.models.NodeInputPort] :keyword outputs: :paramtype outputs: list[~flow.models.NodeOutputPort] :keyword control_outputs: :paramtype control_outputs: list[~flow.models.ControlOutput] """ super(NodePortInterface, self).__init__(**kwargs) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.control_outputs = kwargs.get('control_outputs', None) class Nodes(msrest.serialization.Model): """Nodes. All required parameters must be populated in order to send to Azure. :ivar nodes_value_type: Required. Possible values include: "All", "Custom". :vartype nodes_value_type: str or ~flow.models.NodesValueType :ivar values: :vartype values: list[int] """ _validation = { 'nodes_value_type': {'required': True}, } _attribute_map = { 'nodes_value_type': {'key': 'nodes_value_type', 'type': 'str'}, 'values': {'key': 'values', 'type': '[int]'}, } def __init__( self, **kwargs ): """ :keyword nodes_value_type: Required. Possible values include: "All", "Custom". :paramtype nodes_value_type: str or ~flow.models.NodesValueType :keyword values: :paramtype values: list[int] """ super(Nodes, self).__init__(**kwargs) self.nodes_value_type = kwargs['nodes_value_type'] self.values = kwargs.get('values', None) class NodeSource(msrest.serialization.Model): """NodeSource. 
:ivar type: :vartype type: str :ivar tool: :vartype tool: str :ivar path: :vartype path: str """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'tool': {'key': 'tool', 'type': 'str'}, 'path': {'key': 'path', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword type: :paramtype type: str :keyword tool: :paramtype tool: str :keyword path: :paramtype path: str """ super(NodeSource, self).__init__(**kwargs) self.type = kwargs.get('type', None) self.tool = kwargs.get('tool', None) self.path = kwargs.get('path', None) class NodeTelemetryMetaInfo(msrest.serialization.Model): """NodeTelemetryMetaInfo. :ivar pipeline_run_id: :vartype pipeline_run_id: str :ivar node_id: :vartype node_id: str :ivar version_id: :vartype version_id: str :ivar node_type: :vartype node_type: str :ivar node_source: :vartype node_source: str :ivar is_anonymous: :vartype is_anonymous: bool :ivar is_pipeline_component: :vartype is_pipeline_component: bool """ _attribute_map = { 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, 'node_id': {'key': 'nodeId', 'type': 'str'}, 'version_id': {'key': 'versionId', 'type': 'str'}, 'node_type': {'key': 'nodeType', 'type': 'str'}, 'node_source': {'key': 'nodeSource', 'type': 'str'}, 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, 'is_pipeline_component': {'key': 'isPipelineComponent', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword pipeline_run_id: :paramtype pipeline_run_id: str :keyword node_id: :paramtype node_id: str :keyword version_id: :paramtype version_id: str :keyword node_type: :paramtype node_type: str :keyword node_source: :paramtype node_source: str :keyword is_anonymous: :paramtype is_anonymous: bool :keyword is_pipeline_component: :paramtype is_pipeline_component: bool """ super(NodeTelemetryMetaInfo, self).__init__(**kwargs) self.pipeline_run_id = kwargs.get('pipeline_run_id', None) self.node_id = kwargs.get('node_id', None) self.version_id = kwargs.get('version_id', None) self.node_type = 
kwargs.get('node_type', None) self.node_source = kwargs.get('node_source', None) self.is_anonymous = kwargs.get('is_anonymous', None) self.is_pipeline_component = kwargs.get('is_pipeline_component', None) class NodeVariant(msrest.serialization.Model): """NodeVariant. :ivar variants: This is a dictionary. :vartype variants: dict[str, ~flow.models.VariantNode] :ivar default_variant_id: :vartype default_variant_id: str """ _attribute_map = { 'variants': {'key': 'variants', 'type': '{VariantNode}'}, 'default_variant_id': {'key': 'defaultVariantId', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword variants: This is a dictionary. :paramtype variants: dict[str, ~flow.models.VariantNode] :keyword default_variant_id: :paramtype default_variant_id: str """ super(NodeVariant, self).__init__(**kwargs) self.variants = kwargs.get('variants', None) self.default_variant_id = kwargs.get('default_variant_id', None) class NoteBookTaskDto(msrest.serialization.Model): """NoteBookTaskDto. :ivar notebook_path: :vartype notebook_path: str :ivar base_parameters: Dictionary of :code:`<string>`. :vartype base_parameters: dict[str, str] """ _attribute_map = { 'notebook_path': {'key': 'notebook_path', 'type': 'str'}, 'base_parameters': {'key': 'base_parameters', 'type': '{str}'}, } def __init__( self, **kwargs ): """ :keyword notebook_path: :paramtype notebook_path: str :keyword base_parameters: Dictionary of :code:`<string>`. :paramtype base_parameters: dict[str, str] """ super(NoteBookTaskDto, self).__init__(**kwargs) self.notebook_path = kwargs.get('notebook_path', None) self.base_parameters = kwargs.get('base_parameters', None) class NotificationSetting(msrest.serialization.Model): """NotificationSetting. :ivar emails: :vartype emails: list[str] :ivar email_on: :vartype email_on: list[str or ~flow.models.EmailNotificationEnableType] :ivar webhooks: Dictionary of :code:`<Webhook>`. 
:vartype webhooks: dict[str, ~flow.models.Webhook] """ _attribute_map = { 'emails': {'key': 'emails', 'type': '[str]'}, 'email_on': {'key': 'emailOn', 'type': '[str]'}, 'webhooks': {'key': 'webhooks', 'type': '{Webhook}'}, } def __init__( self, **kwargs ): """ :keyword emails: :paramtype emails: list[str] :keyword email_on: :paramtype email_on: list[str or ~flow.models.EmailNotificationEnableType] :keyword webhooks: Dictionary of :code:`<Webhook>`. :paramtype webhooks: dict[str, ~flow.models.Webhook] """ super(NotificationSetting, self).__init__(**kwargs) self.emails = kwargs.get('emails', None) self.email_on = kwargs.get('email_on', None) self.webhooks = kwargs.get('webhooks', None) class ODataError(msrest.serialization.Model): """Represents OData v4 error object. :ivar code: Gets or sets a language-independent, service-defined error code. This code serves as a sub-status for the HTTP error code specified in the response. :vartype code: str :ivar message: Gets or sets a human-readable, language-dependent representation of the error. The ``Content-Language`` header MUST contain the language code from [RFC5646] corresponding to the language in which the value for message is written. :vartype message: str :ivar target: Gets or sets the target of the particular error (for example, the name of the property in error). :vartype target: str :ivar details: Gets or sets additional details about the error. :vartype details: list[~flow.models.ODataErrorDetail] :ivar innererror: The contents of this object are service-defined. Usually this object contains information that will help debug the service and SHOULD only be used in development environments in order to guard against potential security concerns around information disclosure. 
:vartype innererror: ~flow.models.ODataInnerError """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, 'details': {'key': 'details', 'type': '[ODataErrorDetail]'}, 'innererror': {'key': 'innererror', 'type': 'ODataInnerError'}, } def __init__( self, **kwargs ): """ :keyword code: Gets or sets a language-independent, service-defined error code. This code serves as a sub-status for the HTTP error code specified in the response. :paramtype code: str :keyword message: Gets or sets a human-readable, language-dependent representation of the error. The ``Content-Language`` header MUST contain the language code from [RFC5646] corresponding to the language in which the value for message is written. :paramtype message: str :keyword target: Gets or sets the target of the particular error (for example, the name of the property in error). :paramtype target: str :keyword details: Gets or sets additional details about the error. :paramtype details: list[~flow.models.ODataErrorDetail] :keyword innererror: The contents of this object are service-defined. Usually this object contains information that will help debug the service and SHOULD only be used in development environments in order to guard against potential security concerns around information disclosure. :paramtype innererror: ~flow.models.ODataInnerError """ super(ODataError, self).__init__(**kwargs) self.code = kwargs.get('code', None) self.message = kwargs.get('message', None) self.target = kwargs.get('target', None) self.details = kwargs.get('details', None) self.innererror = kwargs.get('innererror', None) class ODataErrorDetail(msrest.serialization.Model): """Represents additional error details. :ivar code: Gets or sets a language-independent, service-defined error code. :vartype code: str :ivar message: Gets or sets a human-readable, language-dependent representation of the error. 
:vartype message: str :ivar target: Gets or sets the target of the particular error (for example, the name of the property in error). :vartype target: str """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword code: Gets or sets a language-independent, service-defined error code. :paramtype code: str :keyword message: Gets or sets a human-readable, language-dependent representation of the error. :paramtype message: str :keyword target: Gets or sets the target of the particular error (for example, the name of the property in error). :paramtype target: str """ super(ODataErrorDetail, self).__init__(**kwargs) self.code = kwargs.get('code', None) self.message = kwargs.get('message', None) self.target = kwargs.get('target', None) class ODataErrorResponse(msrest.serialization.Model): """Represents OData v4 compliant error response message. :ivar error: Represents OData v4 error object. :vartype error: ~flow.models.ODataError """ _attribute_map = { 'error': {'key': 'error', 'type': 'ODataError'}, } def __init__( self, **kwargs ): """ :keyword error: Represents OData v4 error object. :paramtype error: ~flow.models.ODataError """ super(ODataErrorResponse, self).__init__(**kwargs) self.error = kwargs.get('error', None) class ODataInnerError(msrest.serialization.Model): """The contents of this object are service-defined. Usually this object contains information that will help debug the service and SHOULD only be used in development environments in order to guard against potential security concerns around information disclosure. :ivar client_request_id: Gets or sets the client provided request ID. :vartype client_request_id: str :ivar service_request_id: Gets or sets the server generated request ID. :vartype service_request_id: str :ivar trace: Gets or sets the exception stack trace. DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENT. 
:vartype trace: str :ivar context: Gets or sets additional context for the exception. DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENT. :vartype context: str """ _attribute_map = { 'client_request_id': {'key': 'clientRequestId', 'type': 'str'}, 'service_request_id': {'key': 'serviceRequestId', 'type': 'str'}, 'trace': {'key': 'trace', 'type': 'str'}, 'context': {'key': 'context', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword client_request_id: Gets or sets the client provided request ID. :paramtype client_request_id: str :keyword service_request_id: Gets or sets the server generated request ID. :paramtype service_request_id: str :keyword trace: Gets or sets the exception stack trace. DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENT. :paramtype trace: str :keyword context: Gets or sets additional context for the exception. DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENT. :paramtype context: str """ super(ODataInnerError, self).__init__(**kwargs) self.client_request_id = kwargs.get('client_request_id', None) self.service_request_id = kwargs.get('service_request_id', None) self.trace = kwargs.get('trace', None) self.context = kwargs.get('context', None) class OutputData(msrest.serialization.Model): """OutputData. :ivar output_location: :vartype output_location: ~flow.models.ExecutionDataLocation :ivar mechanism: Possible values include: "Upload", "Mount", "Hdfs", "Link", "Direct". 
:vartype mechanism: str or ~flow.models.OutputMechanism :ivar additional_options: :vartype additional_options: ~flow.models.OutputOptions :ivar environment_variable_name: :vartype environment_variable_name: str """ _attribute_map = { 'output_location': {'key': 'outputLocation', 'type': 'ExecutionDataLocation'}, 'mechanism': {'key': 'mechanism', 'type': 'str'}, 'additional_options': {'key': 'additionalOptions', 'type': 'OutputOptions'}, 'environment_variable_name': {'key': 'environmentVariableName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword output_location: :paramtype output_location: ~flow.models.ExecutionDataLocation :keyword mechanism: Possible values include: "Upload", "Mount", "Hdfs", "Link", "Direct". :paramtype mechanism: str or ~flow.models.OutputMechanism :keyword additional_options: :paramtype additional_options: ~flow.models.OutputOptions :keyword environment_variable_name: :paramtype environment_variable_name: str """ super(OutputData, self).__init__(**kwargs) self.output_location = kwargs.get('output_location', None) self.mechanism = kwargs.get('mechanism', None) self.additional_options = kwargs.get('additional_options', None) self.environment_variable_name = kwargs.get('environment_variable_name', None) class OutputDataBinding(msrest.serialization.Model): """OutputDataBinding. :ivar datastore_id: :vartype datastore_id: str :ivar path_on_datastore: :vartype path_on_datastore: str :ivar path_on_compute: :vartype path_on_compute: str :ivar description: :vartype description: str :ivar uri: :vartype uri: ~flow.models.MfeInternalUriReference :ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount", "ReadWriteMount", "Direct", "EvalMount", "EvalDownload". 
    :vartype mode: str or ~flow.models.DataBindingMode
    :ivar asset_uri:
    :vartype asset_uri: str
    :ivar is_asset_job_output:
    :vartype is_asset_job_output: bool
    :ivar job_output_type: Possible values include: "Uri", "Dataset", "UriFile", "UriFolder",
     "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
    :vartype job_output_type: str or ~flow.models.JobOutputType
    :ivar asset_name:
    :vartype asset_name: str
    :ivar asset_version:
    :vartype asset_version: str
    :ivar auto_delete_setting:
    :vartype auto_delete_setting: ~flow.models.AutoDeleteSetting
    """

    # msrest serialization map: python attribute name -> wire (JSON) key and msrest type.
    _attribute_map = {
        'datastore_id': {'key': 'datastoreId', 'type': 'str'},
        'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'MfeInternalUriReference'},
        'mode': {'key': 'mode', 'type': 'str'},
        'asset_uri': {'key': 'assetUri', 'type': 'str'},
        'is_asset_job_output': {'key': 'isAssetJobOutput', 'type': 'bool'},
        'job_output_type': {'key': 'jobOutputType', 'type': 'str'},
        'asset_name': {'key': 'assetName', 'type': 'str'},
        'asset_version': {'key': 'assetVersion', 'type': 'str'},
        'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword datastore_id:
        :paramtype datastore_id: str
        :keyword path_on_datastore:
        :paramtype path_on_datastore: str
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword description:
        :paramtype description: str
        :keyword uri:
        :paramtype uri: ~flow.models.MfeInternalUriReference
        :keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
         "ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
        :paramtype mode: str or ~flow.models.DataBindingMode
        :keyword asset_uri:
        :paramtype asset_uri: str
        :keyword is_asset_job_output:
        :paramtype is_asset_job_output: bool
        :keyword job_output_type: Possible values include: "Uri", "Dataset", "UriFile", "UriFolder",
         "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
        :paramtype job_output_type: str or ~flow.models.JobOutputType
        :keyword asset_name:
        :paramtype asset_name: str
        :keyword asset_version:
        :paramtype asset_version: str
        :keyword auto_delete_setting:
        :paramtype auto_delete_setting: ~flow.models.AutoDeleteSetting
        """
        super(OutputDataBinding, self).__init__(**kwargs)
        self.datastore_id = kwargs.get('datastore_id', None)
        self.path_on_datastore = kwargs.get('path_on_datastore', None)
        self.path_on_compute = kwargs.get('path_on_compute', None)
        self.description = kwargs.get('description', None)
        self.uri = kwargs.get('uri', None)
        self.mode = kwargs.get('mode', None)
        self.asset_uri = kwargs.get('asset_uri', None)
        self.is_asset_job_output = kwargs.get('is_asset_job_output', None)
        self.job_output_type = kwargs.get('job_output_type', None)
        self.asset_name = kwargs.get('asset_name', None)
        self.asset_version = kwargs.get('asset_version', None)
        self.auto_delete_setting = kwargs.get('auto_delete_setting', None)


# AutoRest-generated msrest model (see file header); manual edits are lost on regeneration.
class OutputDatasetLineage(msrest.serialization.Model):
    """OutputDatasetLineage.

    :ivar identifier:
    :vartype identifier: ~flow.models.DatasetIdentifier
    :ivar output_type: Possible values include: "RunOutput", "Reference".
    :vartype output_type: str or ~flow.models.DatasetOutputType
    :ivar output_details:
    :vartype output_details: ~flow.models.DatasetOutputDetails
    """

    _attribute_map = {
        'identifier': {'key': 'identifier', 'type': 'DatasetIdentifier'},
        'output_type': {'key': 'outputType', 'type': 'str'},
        'output_details': {'key': 'outputDetails', 'type': 'DatasetOutputDetails'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword identifier:
        :paramtype identifier: ~flow.models.DatasetIdentifier
        :keyword output_type: Possible values include: "RunOutput", "Reference".
        :paramtype output_type: str or ~flow.models.DatasetOutputType
        :keyword output_details:
        :paramtype output_details: ~flow.models.DatasetOutputDetails
        """
        super(OutputDatasetLineage, self).__init__(**kwargs)
        self.identifier = kwargs.get('identifier', None)
        self.output_type = kwargs.get('output_type', None)
        self.output_details = kwargs.get('output_details', None)


class OutputDefinition(msrest.serialization.Model):
    """OutputDefinition.

    :ivar name:
    :vartype name: str
    :ivar type:
    :vartype type: list[str or ~flow.models.ValueType]
    :ivar description:
    :vartype description: str
    :ivar is_property:
    :vartype is_property: bool
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        # Serialized as a plain string list; the ValueType enum constraint is documentation-only.
        'type': {'key': 'type', 'type': '[str]'},
        'description': {'key': 'description', 'type': 'str'},
        'is_property': {'key': 'isProperty', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword type:
        :paramtype type: list[str or ~flow.models.ValueType]
        :keyword description:
        :paramtype description: str
        :keyword is_property:
        :paramtype is_property: bool
        """
        super(OutputDefinition, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.type = kwargs.get('type', None)
        self.description = kwargs.get('description', None)
        self.is_property = kwargs.get('is_property', None)


class OutputOptions(msrest.serialization.Model):
    """OutputOptions.

    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar registration_options:
    :vartype registration_options: ~flow.models.RegistrationOptions
    :ivar upload_options:
    :vartype upload_options: ~flow.models.UploadOptions
    :ivar mount_options: Dictionary of :code:`<string>`.
    :vartype mount_options: dict[str, str]
    """

    _attribute_map = {
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'registration_options': {'key': 'registrationOptions', 'type': 'RegistrationOptions'},
        'upload_options': {'key': 'uploadOptions', 'type': 'UploadOptions'},
        'mount_options': {'key': 'mountOptions', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword registration_options:
        :paramtype registration_options: ~flow.models.RegistrationOptions
        :keyword upload_options:
        :paramtype upload_options: ~flow.models.UploadOptions
        :keyword mount_options: Dictionary of :code:`<string>`.
        :paramtype mount_options: dict[str, str]
        """
        super(OutputOptions, self).__init__(**kwargs)
        self.path_on_compute = kwargs.get('path_on_compute', None)
        self.registration_options = kwargs.get('registration_options', None)
        self.upload_options = kwargs.get('upload_options', None)
        self.mount_options = kwargs.get('mount_options', None)


# AutoRest-generated msrest model (see file header); manual edits are lost on regeneration.
class OutputSetting(msrest.serialization.Model):
    """OutputSetting.

    :ivar name:
    :vartype name: str
    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar data_store_name_parameter_assignment:
    :vartype data_store_name_parameter_assignment: ~flow.models.ParameterAssignment
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
    :ivar data_store_mode_parameter_assignment:
    :vartype data_store_mode_parameter_assignment: ~flow.models.ParameterAssignment
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar path_on_compute_parameter_assignment:
    :vartype path_on_compute_parameter_assignment: ~flow.models.ParameterAssignment
    :ivar overwrite:
    :vartype overwrite: bool
    :ivar data_reference_name:
    :vartype data_reference_name: str
    :ivar web_service_port:
    :vartype web_service_port: str
    :ivar dataset_registration:
    :vartype dataset_registration: ~flow.models.DatasetRegistration
    :ivar dataset_output_options:
    :vartype dataset_output_options: ~flow.models.DatasetOutputOptions
    :ivar asset_output_settings:
    :vartype asset_output_settings: ~flow.models.AssetOutputSettings
    :ivar parameter_name:
    :vartype parameter_name: str
    :ivar asset_output_settings_parameter_name:
    :vartype asset_output_settings_parameter_name: str
    """

    # NOTE(review): some wire keys below are PascalCase (e.g. 'DataStoreNameParameterAssignment',
    # 'AssetOutputSettings') unlike the camelCase used elsewhere — presumably this mirrors the
    # service contract as generated; do not "normalize" them.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'data_store_name_parameter_assignment': {'key': 'DataStoreNameParameterAssignment', 'type': 'ParameterAssignment'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'data_store_mode_parameter_assignment': {'key': 'DataStoreModeParameterAssignment', 'type': 'ParameterAssignment'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'path_on_compute_parameter_assignment': {'key': 'PathOnComputeParameterAssignment', 'type': 'ParameterAssignment'},
        'overwrite': {'key': 'overwrite', 'type': 'bool'},
        'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
        'web_service_port': {'key': 'webServicePort', 'type': 'str'},
        'dataset_registration': {'key': 'datasetRegistration', 'type': 'DatasetRegistration'},
        'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'DatasetOutputOptions'},
        'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AssetOutputSettings'},
        'parameter_name': {'key': 'parameterName', 'type': 'str'},
        'asset_output_settings_parameter_name': {'key': 'AssetOutputSettingsParameterName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword data_store_name_parameter_assignment:
        :paramtype data_store_name_parameter_assignment: ~flow.models.ParameterAssignment
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
        :keyword data_store_mode_parameter_assignment:
        :paramtype data_store_mode_parameter_assignment: ~flow.models.ParameterAssignment
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword path_on_compute_parameter_assignment:
        :paramtype path_on_compute_parameter_assignment: ~flow.models.ParameterAssignment
        :keyword overwrite:
        :paramtype overwrite: bool
        :keyword data_reference_name:
        :paramtype data_reference_name: str
        :keyword web_service_port:
        :paramtype web_service_port: str
        :keyword dataset_registration:
        :paramtype dataset_registration: ~flow.models.DatasetRegistration
        :keyword dataset_output_options:
        :paramtype dataset_output_options: ~flow.models.DatasetOutputOptions
        :keyword asset_output_settings:
        :paramtype asset_output_settings: ~flow.models.AssetOutputSettings
        :keyword parameter_name:
        :paramtype parameter_name: str
        :keyword asset_output_settings_parameter_name:
        :paramtype asset_output_settings_parameter_name: str
        """
        super(OutputSetting, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.data_store_name = kwargs.get('data_store_name', None)
        self.data_store_name_parameter_assignment = kwargs.get('data_store_name_parameter_assignment', None)
        self.data_store_mode = kwargs.get('data_store_mode', None)
        self.data_store_mode_parameter_assignment = kwargs.get('data_store_mode_parameter_assignment', None)
        self.path_on_compute = kwargs.get('path_on_compute', None)
        self.path_on_compute_parameter_assignment = kwargs.get('path_on_compute_parameter_assignment', None)
        self.overwrite = kwargs.get('overwrite', None)
        self.data_reference_name = kwargs.get('data_reference_name', None)
        self.web_service_port = kwargs.get('web_service_port', None)
        self.dataset_registration = kwargs.get('dataset_registration', None)
        self.dataset_output_options = kwargs.get('dataset_output_options', None)
        self.asset_output_settings = kwargs.get('asset_output_settings', None)
        self.parameter_name = kwargs.get('parameter_name', None)
        self.asset_output_settings_parameter_name = kwargs.get('asset_output_settings_parameter_name', None)


class OutputSettingSpec(msrest.serialization.Model):
    """OutputSettingSpec.

    :ivar supported_data_store_modes:
    :vartype supported_data_store_modes: list[str or ~flow.models.AEVADataStoreMode]
    :ivar default_asset_output_path:
    :vartype default_asset_output_path: str
    """

    _attribute_map = {
        'supported_data_store_modes': {'key': 'supportedDataStoreModes', 'type': '[str]'},
        'default_asset_output_path': {'key': 'defaultAssetOutputPath', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword supported_data_store_modes:
        :paramtype supported_data_store_modes: list[str or ~flow.models.AEVADataStoreMode]
        :keyword default_asset_output_path:
        :paramtype default_asset_output_path: str
        """
        super(OutputSettingSpec, self).__init__(**kwargs)
        self.supported_data_store_modes = kwargs.get('supported_data_store_modes', None)
        self.default_asset_output_path = kwargs.get('default_asset_output_path', None)


class PaginatedDataInfoList(msrest.serialization.Model):
    """A paginated list of DataInfos.

    :ivar value: An array of objects of type DataInfo.
    :vartype value: list[~flow.models.DataInfo]
    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.
     If null, there are no additional pages.
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[DataInfo]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword value: An array of objects of type DataInfo.
        :paramtype value: list[~flow.models.DataInfo]
        :keyword continuation_token: The token used in retrieving the next page. If null, there
         are no additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.
         If null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedDataInfoList, self).__init__(**kwargs)
        self.value = kwargs.get('value', None)
        self.continuation_token = kwargs.get('continuation_token', None)
        self.next_link = kwargs.get('next_link', None)


# AutoRest-generated paginated-result envelope; same shape as the other Paginated*List models.
class PaginatedModelDtoList(msrest.serialization.Model):
    """A paginated list of ModelDtos.

    :ivar value: An array of objects of type ModelDto.
    :vartype value: list[~flow.models.ModelDto]
    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.
     If null, there are no additional pages.
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[ModelDto]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword value: An array of objects of type ModelDto.
        :paramtype value: list[~flow.models.ModelDto]
        :keyword continuation_token: The token used in retrieving the next page. If null, there
         are no additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.
         If null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedModelDtoList, self).__init__(**kwargs)
        self.value = kwargs.get('value', None)
        self.continuation_token = kwargs.get('continuation_token', None)
        self.next_link = kwargs.get('next_link', None)


class PaginatedModuleDtoList(msrest.serialization.Model):
    """A paginated list of ModuleDtos.

    :ivar value: An array of objects of type ModuleDto.
    :vartype value: list[~flow.models.ModuleDto]
    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.
     If null, there are no additional pages.
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[ModuleDto]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword value: An array of objects of type ModuleDto.
        :paramtype value: list[~flow.models.ModuleDto]
        :keyword continuation_token: The token used in retrieving the next page. If null, there
         are no additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.
         If null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedModuleDtoList, self).__init__(**kwargs)
        self.value = kwargs.get('value', None)
        self.continuation_token = kwargs.get('continuation_token', None)
        self.next_link = kwargs.get('next_link', None)


class PaginatedPipelineDraftSummaryList(msrest.serialization.Model):
    """A paginated list of PipelineDraftSummarys.

    :ivar value: An array of objects of type PipelineDraftSummary.
    :vartype value: list[~flow.models.PipelineDraftSummary]
    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.
     If null, there are no additional pages.
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[PipelineDraftSummary]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword value: An array of objects of type PipelineDraftSummary.
        :paramtype value: list[~flow.models.PipelineDraftSummary]
        :keyword continuation_token: The token used in retrieving the next page. If null, there
         are no additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.
         If null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedPipelineDraftSummaryList, self).__init__(**kwargs)
        self.value = kwargs.get('value', None)
        self.continuation_token = kwargs.get('continuation_token', None)
        self.next_link = kwargs.get('next_link', None)


class PaginatedPipelineEndpointSummaryList(msrest.serialization.Model):
    """A paginated list of PipelineEndpointSummarys.

    :ivar value: An array of objects of type PipelineEndpointSummary.
    :vartype value: list[~flow.models.PipelineEndpointSummary]
    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.
     If null, there are no additional pages.
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[PipelineEndpointSummary]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword value: An array of objects of type PipelineEndpointSummary.
        :paramtype value: list[~flow.models.PipelineEndpointSummary]
        :keyword continuation_token: The token used in retrieving the next page. If null, there
         are no additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.
         If null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedPipelineEndpointSummaryList, self).__init__(**kwargs)
        self.value = kwargs.get('value', None)
        self.continuation_token = kwargs.get('continuation_token', None)
        self.next_link = kwargs.get('next_link', None)


# AutoRest-generated paginated-result envelope; same shape as the other Paginated*List models.
class PaginatedPipelineRunSummaryList(msrest.serialization.Model):
    """A paginated list of PipelineRunSummarys.

    :ivar value: An array of objects of type PipelineRunSummary.
    :vartype value: list[~flow.models.PipelineRunSummary]
    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.
     If null, there are no additional pages.
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[PipelineRunSummary]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword value: An array of objects of type PipelineRunSummary.
        :paramtype value: list[~flow.models.PipelineRunSummary]
        :keyword continuation_token: The token used in retrieving the next page. If null, there
         are no additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.
         If null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedPipelineRunSummaryList, self).__init__(**kwargs)
        self.value = kwargs.get('value', None)
        self.continuation_token = kwargs.get('continuation_token', None)
        self.next_link = kwargs.get('next_link', None)


class PaginatedPublishedPipelineSummaryList(msrest.serialization.Model):
    """A paginated list of PublishedPipelineSummarys.

    :ivar value: An array of objects of type PublishedPipelineSummary.
    :vartype value: list[~flow.models.PublishedPipelineSummary]
    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.
     If null, there are no additional pages.
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[PublishedPipelineSummary]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword value: An array of objects of type PublishedPipelineSummary.
        :paramtype value: list[~flow.models.PublishedPipelineSummary]
        :keyword continuation_token: The token used in retrieving the next page. If null, there
         are no additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.
         If null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedPublishedPipelineSummaryList, self).__init__(**kwargs)
        self.value = kwargs.get('value', None)
        self.continuation_token = kwargs.get('continuation_token', None)
        self.next_link = kwargs.get('next_link', None)


class ParallelForControlFlowInfo(msrest.serialization.Model):
    """ParallelForControlFlowInfo.

    :ivar parallel_for_items_input:
    :vartype parallel_for_items_input: ~flow.models.ParameterAssignment
    """

    _attribute_map = {
        'parallel_for_items_input': {'key': 'parallelForItemsInput', 'type': 'ParameterAssignment'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword parallel_for_items_input:
        :paramtype parallel_for_items_input: ~flow.models.ParameterAssignment
        """
        super(ParallelForControlFlowInfo, self).__init__(**kwargs)
        self.parallel_for_items_input = kwargs.get('parallel_for_items_input', None)


class ParallelTaskConfiguration(msrest.serialization.Model):
    """ParallelTaskConfiguration.

    :ivar max_retries_per_worker:
    :vartype max_retries_per_worker: int
    :ivar worker_count_per_node:
    :vartype worker_count_per_node: int
    :ivar terminal_exit_codes:
    :vartype terminal_exit_codes: list[int]
    :ivar configuration: Dictionary of :code:`<string>`.
    :vartype configuration: dict[str, str]
    """

    _attribute_map = {
        'max_retries_per_worker': {'key': 'maxRetriesPerWorker', 'type': 'int'},
        'worker_count_per_node': {'key': 'workerCountPerNode', 'type': 'int'},
        'terminal_exit_codes': {'key': 'terminalExitCodes', 'type': '[int]'},
        'configuration': {'key': 'configuration', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword max_retries_per_worker:
        :paramtype max_retries_per_worker: int
        :keyword worker_count_per_node:
        :paramtype worker_count_per_node: int
        :keyword terminal_exit_codes:
        :paramtype terminal_exit_codes: list[int]
        :keyword configuration: Dictionary of :code:`<string>`.
        :paramtype configuration: dict[str, str]
        """
        super(ParallelTaskConfiguration, self).__init__(**kwargs)
        self.max_retries_per_worker = kwargs.get('max_retries_per_worker', None)
        self.worker_count_per_node = kwargs.get('worker_count_per_node', None)
        self.terminal_exit_codes = kwargs.get('terminal_exit_codes', None)
        self.configuration = kwargs.get('configuration', None)


class Parameter(msrest.serialization.Model):
    """Parameter.

    :ivar name:
    :vartype name: str
    :ivar documentation:
    :vartype documentation: str
    :ivar default_value:
    :vartype default_value: str
    :ivar is_optional:
    :vartype is_optional: bool
    :ivar min_max_rules:
    :vartype min_max_rules: list[~flow.models.MinMaxParameterRule]
    :ivar enum_rules:
    :vartype enum_rules: list[~flow.models.EnumParameterRule]
    :ivar type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
    :vartype type: str or ~flow.models.ParameterType
    :ivar label:
    :vartype label: str
    :ivar group_names:
    :vartype group_names: list[str]
    :ivar argument_name:
    :vartype argument_name: str
    :ivar ui_hint:
    :vartype ui_hint: ~flow.models.UIParameterHint
    """

    # msrest serialization map: python attribute name -> wire (JSON) key and msrest type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'documentation': {'key': 'documentation', 'type': 'str'},
        'default_value': {'key': 'defaultValue', 'type': 'str'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
        'min_max_rules': {'key': 'minMaxRules', 'type': '[MinMaxParameterRule]'},
        'enum_rules': {'key': 'enumRules', 'type': '[EnumParameterRule]'},
        'type': {'key': 'type', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'group_names': {'key': 'groupNames', 'type': '[str]'},
        'argument_name': {'key': 'argumentName', 'type': 'str'},
        'ui_hint': {'key': 'uiHint', 'type': 'UIParameterHint'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword documentation:
        :paramtype documentation: str
        :keyword default_value:
        :paramtype default_value: str
        :keyword is_optional:
        :paramtype is_optional: bool
        :keyword min_max_rules:
        :paramtype min_max_rules: list[~flow.models.MinMaxParameterRule]
        :keyword enum_rules:
        :paramtype enum_rules: list[~flow.models.EnumParameterRule]
        :keyword type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
        :paramtype type: str or ~flow.models.ParameterType
        :keyword label:
        :paramtype label: str
        :keyword group_names:
        :paramtype group_names: list[str]
        :keyword argument_name:
        :paramtype argument_name: str
        :keyword ui_hint:
        :paramtype ui_hint: ~flow.models.UIParameterHint
        """
        super(Parameter, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.documentation = kwargs.get('documentation', None)
        self.default_value = kwargs.get('default_value', None)
        self.is_optional = kwargs.get('is_optional', None)
        self.min_max_rules = kwargs.get('min_max_rules', None)
        self.enum_rules = kwargs.get('enum_rules', None)
        self.type = kwargs.get('type', None)
        self.label = kwargs.get('label', None)
        self.group_names = kwargs.get('group_names', None)
        self.argument_name = kwargs.get('argument_name', None)
        self.ui_hint = kwargs.get('ui_hint', None)


# AutoRest-generated msrest model (see file header); manual edits are lost on regeneration.
class ParameterAssignment(msrest.serialization.Model):
    """ParameterAssignment.

    :ivar value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
     "Input", "DataPath", "DataSetDefinition".
    :vartype value_type: str or ~flow.models.ParameterValueType
    :ivar assignments_to_concatenate:
    :vartype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
    :ivar data_path_assignment:
    :vartype data_path_assignment: ~flow.models.LegacyDataPath
    :ivar data_set_definition_value_assignment:
    :vartype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
    :ivar name:
    :vartype name: str
    :ivar value:
    :vartype value: str
    """

    _attribute_map = {
        'value_type': {'key': 'valueType', 'type': 'str'},
        'assignments_to_concatenate': {'key': 'assignmentsToConcatenate', 'type': '[ParameterAssignment]'},
        'data_path_assignment': {'key': 'dataPathAssignment', 'type': 'LegacyDataPath'},
        'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': 'DataSetDefinitionValue'},
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
         "Input", "DataPath", "DataSetDefinition".
        :paramtype value_type: str or ~flow.models.ParameterValueType
        :keyword assignments_to_concatenate:
        :paramtype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
        :keyword data_path_assignment:
        :paramtype data_path_assignment: ~flow.models.LegacyDataPath
        :keyword data_set_definition_value_assignment:
        :paramtype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
        :keyword name:
        :paramtype name: str
        :keyword value:
        :paramtype value: str
        """
        super(ParameterAssignment, self).__init__(**kwargs)
        self.value_type = kwargs.get('value_type', None)
        self.assignments_to_concatenate = kwargs.get('assignments_to_concatenate', None)
        self.data_path_assignment = kwargs.get('data_path_assignment', None)
        self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
        self.name = kwargs.get('name', None)
        self.value = kwargs.get('value', None)


class ParameterDefinition(msrest.serialization.Model):
    """ParameterDefinition.

    :ivar name:
    :vartype name: str
    :ivar type:
    :vartype type: str
    :ivar value:
    :vartype value: str
    :ivar is_optional:
    :vartype is_optional: bool
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword type:
        :paramtype type: str
        :keyword value:
        :paramtype value: str
        :keyword is_optional:
        :paramtype is_optional: bool
        """
        super(ParameterDefinition, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.type = kwargs.get('type', None)
        self.value = kwargs.get('value', None)
        self.is_optional = kwargs.get('is_optional', None)


class PatchFlowRequest(msrest.serialization.Model):
    """PatchFlowRequest.

    :ivar flow_patch_operation_type: Possible values include: "ArchiveFlow", "RestoreFlow",
     "ExportFlowToFile".
    :vartype flow_patch_operation_type: str or ~flow.models.FlowPatchOperationType
    :ivar flow_definition_file_path:
    :vartype flow_definition_file_path: str
    """

    _attribute_map = {
        'flow_patch_operation_type': {'key': 'flowPatchOperationType', 'type': 'str'},
        'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword flow_patch_operation_type: Possible values include: "ArchiveFlow", "RestoreFlow",
         "ExportFlowToFile".
        :paramtype flow_patch_operation_type: str or ~flow.models.FlowPatchOperationType
        :keyword flow_definition_file_path:
        :paramtype flow_definition_file_path: str
        """
        super(PatchFlowRequest, self).__init__(**kwargs)
        self.flow_patch_operation_type = kwargs.get('flow_patch_operation_type', None)
        self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)


# AutoRest-generated msrest model (see file header); manual edits are lost on regeneration.
class Pipeline(msrest.serialization.Model):
    """Pipeline.

    :ivar run_id:
    :vartype run_id: str
    :ivar continue_run_on_step_failure:
    :vartype continue_run_on_step_failure: bool
    :ivar default_datastore_name:
    :vartype default_datastore_name: str
    :ivar component_jobs: This is a dictionary.
    :vartype component_jobs: dict[str, ~flow.models.ComponentJob]
    :ivar inputs: This is a dictionary.
    :vartype inputs: dict[str, ~flow.models.PipelineInput]
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, ~flow.models.PipelineOutput]
    """

    _attribute_map = {
        'run_id': {'key': 'runId', 'type': 'str'},
        'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
        'default_datastore_name': {'key': 'defaultDatastoreName', 'type': 'str'},
        'component_jobs': {'key': 'componentJobs', 'type': '{ComponentJob}'},
        'inputs': {'key': 'inputs', 'type': '{PipelineInput}'},
        'outputs': {'key': 'outputs', 'type': '{PipelineOutput}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword run_id:
        :paramtype run_id: str
        :keyword continue_run_on_step_failure:
        :paramtype continue_run_on_step_failure: bool
        :keyword default_datastore_name:
        :paramtype default_datastore_name: str
        :keyword component_jobs: This is a dictionary.
        :paramtype component_jobs: dict[str, ~flow.models.ComponentJob]
        :keyword inputs: This is a dictionary.
        :paramtype inputs: dict[str, ~flow.models.PipelineInput]
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, ~flow.models.PipelineOutput]
        """
        super(Pipeline, self).__init__(**kwargs)
        self.run_id = kwargs.get('run_id', None)
        self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
        self.default_datastore_name = kwargs.get('default_datastore_name', None)
        self.component_jobs = kwargs.get('component_jobs', None)
        self.inputs = kwargs.get('inputs', None)
        self.outputs = kwargs.get('outputs', None)


class PipelineDraft(msrest.serialization.Model):
    """PipelineDraft.

    :ivar graph_draft_id:
    :vartype graph_draft_id: str
    :ivar source_pipeline_run_id:
    :vartype source_pipeline_run_id: str
    :ivar latest_pipeline_run_id:
    :vartype latest_pipeline_run_id: str
    :ivar latest_run_experiment_name:
    :vartype latest_run_experiment_name: str
    :ivar latest_run_experiment_id:
    :vartype latest_run_experiment_id: str
    :ivar is_latest_run_experiment_archived:
    :vartype is_latest_run_experiment_archived: bool
    :ivar status:
    :vartype status: ~flow.models.PipelineStatus
    :ivar graph_detail:
    :vartype graph_detail: ~flow.models.PipelineRunGraphDetail
    :ivar real_time_endpoint_info:
    :vartype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
    :ivar linked_pipelines_info:
    :vartype linked_pipelines_info: list[~flow.models.LinkedPipelineInfo]
    :ivar nodes_in_draft:
    :vartype nodes_in_draft: list[str]
    :ivar studio_migration_info:
    :vartype studio_migration_info: ~flow.models.StudioMigrationInfo
    :ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
    :vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
    :ivar pipeline_run_setting_parameters:
    :vartype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
    :ivar pipeline_run_settings:
    :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
    :ivar continue_run_on_step_failure:
    :vartype continue_run_on_step_failure: bool
    :ivar continue_run_on_failed_optional_input:
    :vartype continue_run_on_failed_optional_input: bool
    :ivar default_compute:
    :vartype default_compute: ~flow.models.ComputeSetting
    :ivar default_datastore:
    :vartype default_datastore: ~flow.models.DatastoreSetting
    :ivar default_cloud_priority:
    :vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
    :ivar enforce_rerun:
    :vartype enforce_rerun: bool
    :ivar pipeline_parameters: This is a dictionary.
    :vartype pipeline_parameters: dict[str, str]
    :ivar data_path_assignments: This is a dictionary.
    :vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
    :ivar data_set_definition_value_assignments: This is a dictionary.
    :vartype data_set_definition_value_assignments: dict[str,
     ~flow.models.DataSetDefinitionValue]
    :ivar asset_output_settings_assignments: This is a dictionary.
    :vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
    :ivar pipeline_timeout:
    :vartype pipeline_timeout: int
    :ivar identity_config:
    :vartype identity_config: ~flow.models.IdentitySetting
    :ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
     "ContainsDesignerBuildin".
    :vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
    :ivar name:
    :vartype name: str
    :ivar last_edited_by:
    :vartype last_edited_by: str
    :ivar created_by:
    :vartype created_by: str
    :ivar description:
    :vartype description: str
    :ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
     "BatchInferencePipeline", "Unknown".
    :vartype pipeline_type: str or ~flow.models.PipelineType
    :ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
    :vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Attribute name -> REST wire key and msrest type ('[T]' = list of T, '{T}' = dict of T,
    # 'iso-8601' = datetime serialized as an ISO-8601 string).
    _attribute_map = {
        'graph_draft_id': {'key': 'graphDraftId', 'type': 'str'},
        'source_pipeline_run_id': {'key': 'sourcePipelineRunId', 'type': 'str'},
        'latest_pipeline_run_id': {'key': 'latestPipelineRunId', 'type': 'str'},
        'latest_run_experiment_name': {'key': 'latestRunExperimentName', 'type': 'str'},
        'latest_run_experiment_id': {'key': 'latestRunExperimentId', 'type': 'str'},
        'is_latest_run_experiment_archived': {'key': 'isLatestRunExperimentArchived', 'type': 'bool'},
        'status': {'key': 'status', 'type': 'PipelineStatus'},
        'graph_detail': {'key': 'graphDetail', 'type': 'PipelineRunGraphDetail'},
        'real_time_endpoint_info': {'key': 'realTimeEndpointInfo', 'type': 'RealTimeEndpointInfo'},
        'linked_pipelines_info': {'key': 'linkedPipelinesInfo', 'type': '[LinkedPipelineInfo]'},
        'nodes_in_draft': {'key': 'nodesInDraft', 'type': '[str]'},
        'studio_migration_info': {'key': 'studioMigrationInfo', 'type': 'StudioMigrationInfo'},
        'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
        'pipeline_run_setting_parameters': {'key': 'pipelineRunSettingParameters', 'type': '[RunSettingParameter]'},
        'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
        'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
        'continue_run_on_failed_optional_input': {'key': 'continueRunOnFailedOptionalInput', 'type': 'bool'},
        'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
        'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
        'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
        'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
        'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
        'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
        'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
        'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
        'pipeline_timeout': {'key': 'pipelineTimeout', 'type': 'int'},
        'identity_config': {'key': 'identityConfig', 'type': 'IdentitySetting'},
        'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'last_edited_by': {'key': 'lastEditedBy', 'type': 'str'},
        'created_by': {'key': 'createdBy', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
        'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword graph_draft_id:
        :paramtype graph_draft_id: str
        :keyword source_pipeline_run_id:
        :paramtype source_pipeline_run_id: str
        :keyword latest_pipeline_run_id:
        :paramtype latest_pipeline_run_id: str
        :keyword latest_run_experiment_name:
        :paramtype latest_run_experiment_name: str
        :keyword latest_run_experiment_id:
        :paramtype latest_run_experiment_id: str
        :keyword is_latest_run_experiment_archived:
        :paramtype is_latest_run_experiment_archived: bool
        :keyword status:
        :paramtype status: ~flow.models.PipelineStatus
        :keyword graph_detail:
        :paramtype graph_detail: ~flow.models.PipelineRunGraphDetail
        :keyword real_time_endpoint_info:
        :paramtype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
        :keyword linked_pipelines_info:
        :paramtype linked_pipelines_info: list[~flow.models.LinkedPipelineInfo]
        :keyword nodes_in_draft:
        :paramtype nodes_in_draft: list[str]
        :keyword studio_migration_info:
        :paramtype studio_migration_info: ~flow.models.StudioMigrationInfo
        :keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
        :paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
        :keyword pipeline_run_setting_parameters:
        :paramtype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
        :keyword pipeline_run_settings:
        :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
        :keyword continue_run_on_step_failure:
        :paramtype continue_run_on_step_failure: bool
        :keyword continue_run_on_failed_optional_input:
        :paramtype continue_run_on_failed_optional_input: bool
        :keyword default_compute:
        :paramtype default_compute: ~flow.models.ComputeSetting
        :keyword default_datastore:
        :paramtype default_datastore: ~flow.models.DatastoreSetting
        :keyword default_cloud_priority:
        :paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
        :keyword enforce_rerun:
        :paramtype enforce_rerun: bool
        :keyword pipeline_parameters: This is a dictionary.
        :paramtype pipeline_parameters: dict[str, str]
        :keyword data_path_assignments: This is a dictionary.
        :paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
        :keyword data_set_definition_value_assignments: This is a dictionary.
        :paramtype data_set_definition_value_assignments: dict[str,
         ~flow.models.DataSetDefinitionValue]
        :keyword asset_output_settings_assignments: This is a dictionary.
        :paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
        :keyword pipeline_timeout:
        :paramtype pipeline_timeout: int
        :keyword identity_config:
        :paramtype identity_config: ~flow.models.IdentitySetting
        :keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
         "ContainsDesignerBuildin".
        :paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
        :keyword name:
        :paramtype name: str
        :keyword last_edited_by:
        :paramtype last_edited_by: str
        :keyword created_by:
        :paramtype created_by: str
        :keyword description:
        :paramtype description: str
        :keyword pipeline_type: Possible values include: "TrainingPipeline",
         "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
        :paramtype pipeline_type: str or ~flow.models.PipelineType
        :keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
        :paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PipelineDraft, self).__init__(**kwargs)
        # Every property defaults to None when not supplied by the caller.
        self.graph_draft_id = kwargs.get('graph_draft_id', None)
        self.source_pipeline_run_id = kwargs.get('source_pipeline_run_id', None)
        self.latest_pipeline_run_id = kwargs.get('latest_pipeline_run_id', None)
        self.latest_run_experiment_name = kwargs.get('latest_run_experiment_name', None)
        self.latest_run_experiment_id = kwargs.get('latest_run_experiment_id', None)
        self.is_latest_run_experiment_archived = kwargs.get('is_latest_run_experiment_archived', None)
        self.status = kwargs.get('status', None)
        self.graph_detail = kwargs.get('graph_detail', None)
        self.real_time_endpoint_info = kwargs.get('real_time_endpoint_info', None)
        self.linked_pipelines_info = kwargs.get('linked_pipelines_info', None)
        self.nodes_in_draft = kwargs.get('nodes_in_draft', None)
        self.studio_migration_info = kwargs.get('studio_migration_info', None)
        self.flattened_sub_graphs = kwargs.get('flattened_sub_graphs', None)
        self.pipeline_run_setting_parameters = kwargs.get('pipeline_run_setting_parameters', None)
        self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
        self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
        self.continue_run_on_failed_optional_input = kwargs.get('continue_run_on_failed_optional_input', None)
        self.default_compute = kwargs.get('default_compute', None)
        self.default_datastore = kwargs.get('default_datastore', None)
        self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
        self.enforce_rerun = kwargs.get('enforce_rerun', None)
        self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
        self.data_path_assignments = kwargs.get('data_path_assignments', None)
        self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
        self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
        self.pipeline_timeout = kwargs.get('pipeline_timeout', None)
        self.identity_config = kwargs.get('identity_config', None)
        self.graph_components_mode = kwargs.get('graph_components_mode', None)
        self.name = kwargs.get('name', None)
        self.last_edited_by = kwargs.get('last_edited_by', None)
        self.created_by = kwargs.get('created_by', None)
        self.description = kwargs.get('description', None)
        self.pipeline_type = kwargs.get('pipeline_type', None)
        self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
        self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)
        self.entity_status = kwargs.get('entity_status', None)
        self.id = kwargs.get('id', None)
        self.etag = kwargs.get('etag', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)


class PipelineDraftStepDetails(msrest.serialization.Model):
    """PipelineDraftStepDetails.

    :ivar run_id:
    :vartype run_id: str
    :ivar target:
    :vartype target: str
    :ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
     "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
     "Failed", "Canceled".
    :vartype status: str or ~flow.models.RunStatus
    :ivar status_detail:
    :vartype status_detail: str
    :ivar parent_run_id:
    :vartype parent_run_id: str
    :ivar start_time:
    :vartype start_time: ~datetime.datetime
    :ivar end_time:
    :vartype end_time: ~datetime.datetime
    :ivar is_reused:
    :vartype is_reused: bool
    :ivar reused_run_id:
    :vartype reused_run_id: str
    :ivar reused_pipeline_run_id:
    :vartype reused_pipeline_run_id: str
    :ivar logs: This is a dictionary.
    :vartype logs: dict[str, str]
    :ivar output_log:
    :vartype output_log: str
    :ivar run_configuration:
    :vartype run_configuration: ~flow.models.RunConfiguration
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, str]
    :ivar port_outputs: This is a dictionary.
    :vartype port_outputs: dict[str, ~flow.models.PortOutputInfo]
    :ivar is_experiment_archived:
    :vartype is_experiment_archived: bool
    """

    # Attribute name -> REST wire key and msrest type ('{T}' = dict of T,
    # 'iso-8601' = datetime serialized as an ISO-8601 string).
    _attribute_map = {
        'run_id': {'key': 'runId', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'is_reused': {'key': 'isReused', 'type': 'bool'},
        'reused_run_id': {'key': 'reusedRunId', 'type': 'str'},
        'reused_pipeline_run_id': {'key': 'reusedPipelineRunId', 'type': 'str'},
        'logs': {'key': 'logs', 'type': '{str}'},
        'output_log': {'key': 'outputLog', 'type': 'str'},
        'run_configuration': {'key': 'runConfiguration', 'type': 'RunConfiguration'},
        'outputs': {'key': 'outputs', 'type': '{str}'},
        'port_outputs': {'key': 'portOutputs', 'type': '{PortOutputInfo}'},
        'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword run_id:
        :paramtype run_id: str
        :keyword target:
        :paramtype target: str
        :keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
         "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
         "Failed", "Canceled".
        :paramtype status: str or ~flow.models.RunStatus
        :keyword status_detail:
        :paramtype status_detail: str
        :keyword parent_run_id:
        :paramtype parent_run_id: str
        :keyword start_time:
        :paramtype start_time: ~datetime.datetime
        :keyword end_time:
        :paramtype end_time: ~datetime.datetime
        :keyword is_reused:
        :paramtype is_reused: bool
        :keyword reused_run_id:
        :paramtype reused_run_id: str
        :keyword reused_pipeline_run_id:
        :paramtype reused_pipeline_run_id: str
        :keyword logs: This is a dictionary.
        :paramtype logs: dict[str, str]
        :keyword output_log:
        :paramtype output_log: str
        :keyword run_configuration:
        :paramtype run_configuration: ~flow.models.RunConfiguration
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, str]
        :keyword port_outputs: This is a dictionary.
        :paramtype port_outputs: dict[str, ~flow.models.PortOutputInfo]
        :keyword is_experiment_archived:
        :paramtype is_experiment_archived: bool
        """
        super(PipelineDraftStepDetails, self).__init__(**kwargs)
        # Every property defaults to None when not supplied by the caller.
        self.run_id = kwargs.get('run_id', None)
        self.target = kwargs.get('target', None)
        self.status = kwargs.get('status', None)
        self.status_detail = kwargs.get('status_detail', None)
        self.parent_run_id = kwargs.get('parent_run_id', None)
        self.start_time = kwargs.get('start_time', None)
        self.end_time = kwargs.get('end_time', None)
        self.is_reused = kwargs.get('is_reused', None)
        self.reused_run_id = kwargs.get('reused_run_id', None)
        self.reused_pipeline_run_id = kwargs.get('reused_pipeline_run_id', None)
        self.logs = kwargs.get('logs', None)
        self.output_log = kwargs.get('output_log', None)
        self.run_configuration = kwargs.get('run_configuration', None)
        self.outputs = kwargs.get('outputs', None)
        self.port_outputs = kwargs.get('port_outputs', None)
        self.is_experiment_archived = kwargs.get('is_experiment_archived', None)


class PipelineDraftSummary(msrest.serialization.Model):
    """PipelineDraftSummary.

    :ivar name:
    :vartype name: str
    :ivar last_edited_by:
    :vartype last_edited_by: str
    :ivar created_by:
    :vartype created_by: str
    :ivar description:
    :vartype description: str
    :ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
     "BatchInferencePipeline", "Unknown".
    :vartype pipeline_type: str or ~flow.models.PipelineType
    :ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
    :vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Attribute name -> REST wire key and msrest type ('{T}' = dict of T,
    # 'iso-8601' = datetime serialized as an ISO-8601 string).
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'last_edited_by': {'key': 'lastEditedBy', 'type': 'str'},
        'created_by': {'key': 'createdBy', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
        'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword last_edited_by:
        :paramtype last_edited_by: str
        :keyword created_by:
        :paramtype created_by: str
        :keyword description:
        :paramtype description: str
        :keyword pipeline_type: Possible values include: "TrainingPipeline",
         "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
        :paramtype pipeline_type: str or ~flow.models.PipelineType
        :keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
        :paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PipelineDraftSummary, self).__init__(**kwargs)
        # Every property defaults to None when not supplied by the caller.
        self.name = kwargs.get('name', None)
        self.last_edited_by = kwargs.get('last_edited_by', None)
        self.created_by = kwargs.get('created_by', None)
        self.description = kwargs.get('description', None)
        self.pipeline_type = kwargs.get('pipeline_type', None)
        self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
        self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)
        self.entity_status = kwargs.get('entity_status', None)
        self.id = kwargs.get('id', None)
        self.etag = kwargs.get('etag', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)


class PipelineEndpoint(msrest.serialization.Model):
    """PipelineEndpoint.

    :ivar default_version:
    :vartype default_version: str
    :ivar default_pipeline_id:
    :vartype default_pipeline_id: str
    :ivar default_graph_id:
    :vartype default_graph_id: str
    :ivar rest_endpoint:
    :vartype rest_endpoint: str
    :ivar published_date:
    :vartype published_date: ~datetime.datetime
    :ivar published_by:
    :vartype published_by: str
    :ivar parameters: This is a dictionary.
    :vartype parameters: dict[str, str]
    :ivar data_set_definition_value_assignment: This is a dictionary.
    :vartype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
    :ivar default_pipeline_name:
    :vartype default_pipeline_name: str
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar updated_by:
    :vartype updated_by: str
    :ivar swagger_url:
    :vartype swagger_url: str
    :ivar last_run_time:
    :vartype last_run_time: ~datetime.datetime
    :ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
     "Canceled", "Queued", "CancelRequested".
    :vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Attribute name -> REST wire key and msrest type ('{T}' = dict of T,
    # 'iso-8601' = datetime serialized as an ISO-8601 string).
    _attribute_map = {
        'default_version': {'key': 'defaultVersion', 'type': 'str'},
        'default_pipeline_id': {'key': 'defaultPipelineId', 'type': 'str'},
        'default_graph_id': {'key': 'defaultGraphId', 'type': 'str'},
        'rest_endpoint': {'key': 'restEndpoint', 'type': 'str'},
        'published_date': {'key': 'publishedDate', 'type': 'iso-8601'},
        'published_by': {'key': 'publishedBy', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{str}'},
        'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': '{DataSetDefinitionValue}'},
        'default_pipeline_name': {'key': 'defaultPipelineName', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'updated_by': {'key': 'updatedBy', 'type': 'str'},
        'swagger_url': {'key': 'swaggerUrl', 'type': 'str'},
        'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
        'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword default_version:
        :paramtype default_version: str
        :keyword default_pipeline_id:
        :paramtype default_pipeline_id: str
        :keyword default_graph_id:
        :paramtype default_graph_id: str
        :keyword rest_endpoint:
        :paramtype rest_endpoint: str
        :keyword published_date:
        :paramtype published_date: ~datetime.datetime
        :keyword published_by:
        :paramtype published_by: str
        :keyword parameters: This is a dictionary.
        :paramtype parameters: dict[str, str]
        :keyword data_set_definition_value_assignment: This is a dictionary.
        :paramtype data_set_definition_value_assignment: dict[str,
         ~flow.models.DataSetDefinitionValue]
        :keyword default_pipeline_name:
        :paramtype default_pipeline_name: str
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword updated_by:
        :paramtype updated_by: str
        :keyword swagger_url:
        :paramtype swagger_url: str
        :keyword last_run_time:
        :paramtype last_run_time: ~datetime.datetime
        :keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
         "Finished", "Canceled", "Queued", "CancelRequested".
        :paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PipelineEndpoint, self).__init__(**kwargs)
        # Every property defaults to None when not supplied by the caller.
        self.default_version = kwargs.get('default_version', None)
        self.default_pipeline_id = kwargs.get('default_pipeline_id', None)
        self.default_graph_id = kwargs.get('default_graph_id', None)
        self.rest_endpoint = kwargs.get('rest_endpoint', None)
        self.published_date = kwargs.get('published_date', None)
        self.published_by = kwargs.get('published_by', None)
        self.parameters = kwargs.get('parameters', None)
        self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
        self.default_pipeline_name = kwargs.get('default_pipeline_name', None)
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.updated_by = kwargs.get('updated_by', None)
        self.swagger_url = kwargs.get('swagger_url', None)
        self.last_run_time = kwargs.get('last_run_time', None)
        self.last_run_status = kwargs.get('last_run_status', None)
        self.tags = kwargs.get('tags', None)
        self.entity_status = kwargs.get('entity_status', None)
        self.id = kwargs.get('id', None)
        self.etag = kwargs.get('etag', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)


class PipelineEndpointSummary(msrest.serialization.Model):
    """PipelineEndpointSummary.

    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar updated_by:
    :vartype updated_by: str
    :ivar swagger_url:
    :vartype swagger_url: str
    :ivar last_run_time:
    :vartype last_run_time: ~datetime.datetime
    :ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
     "Canceled", "Queued", "CancelRequested".
    :vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Attribute name -> REST wire key and msrest type ('{T}' = dict of T,
    # 'iso-8601' = datetime serialized as an ISO-8601 string).
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'updated_by': {'key': 'updatedBy', 'type': 'str'},
        'swagger_url': {'key': 'swaggerUrl', 'type': 'str'},
        'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
        'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword updated_by:
        :paramtype updated_by: str
        :keyword swagger_url:
        :paramtype swagger_url: str
        :keyword last_run_time:
        :paramtype last_run_time: ~datetime.datetime
        :keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
         "Finished", "Canceled", "Queued", "CancelRequested".
        :paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PipelineEndpointSummary, self).__init__(**kwargs)
        # Every property defaults to None when not supplied by the caller.
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.updated_by = kwargs.get('updated_by', None)
        self.swagger_url = kwargs.get('swagger_url', None)
        self.last_run_time = kwargs.get('last_run_time', None)
        self.last_run_status = kwargs.get('last_run_status', None)
        self.tags = kwargs.get('tags', None)
        self.entity_status = kwargs.get('entity_status', None)
        self.id = kwargs.get('id', None)
        self.etag = kwargs.get('etag', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)


class PipelineGraph(msrest.serialization.Model):
    """PipelineGraph.

    :ivar graph_module_dtos:
    :vartype graph_module_dtos: list[~flow.models.ModuleDto]
    :ivar graph_data_sources:
    :vartype graph_data_sources: list[~flow.models.DataInfo]
    :ivar graphs: This is a dictionary.
    :vartype graphs: dict[str, ~flow.models.PipelineGraph]
    :ivar graph_drafts: This is a dictionary.
    :vartype graph_drafts: dict[str, ~flow.models.PipelineGraph]
    :ivar module_node_run_settings:
    :vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
    :ivar module_node_ui_input_settings:
    :vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
    :ivar sub_pipelines_info:
    :vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
    :ivar referenced_node_id:
    :vartype referenced_node_id: str
    :ivar pipeline_run_setting_parameters:
    :vartype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
    :ivar pipeline_run_settings:
    :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
    :ivar real_time_endpoint_info:
    :vartype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
    :ivar node_telemetry_meta_infos:
    :vartype node_telemetry_meta_infos: list[~flow.models.NodeTelemetryMetaInfo]
    :ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
     "ContainsDesignerBuildin".
    :vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
    :ivar module_nodes:
    :vartype module_nodes: list[~flow.models.GraphModuleNode]
    :ivar dataset_nodes:
    :vartype dataset_nodes: list[~flow.models.GraphDatasetNode]
    :ivar sub_graph_nodes:
    :vartype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
    :ivar control_reference_nodes:
    :vartype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
    :ivar control_nodes:
    :vartype control_nodes: list[~flow.models.GraphControlNode]
    :ivar edges:
    :vartype edges: list[~flow.models.GraphEdge]
    :ivar entity_interface:
    :vartype entity_interface: ~flow.models.EntityInterface
    :ivar graph_layout:
    :vartype graph_layout: ~flow.models.GraphLayout
    :ivar created_by:
    :vartype created_by: ~flow.models.CreatedBy
    :ivar last_updated_by:
    :vartype last_updated_by: ~flow.models.CreatedBy
    :ivar default_compute:
    :vartype default_compute: ~flow.models.ComputeSetting
    :ivar default_datastore:
    :vartype default_datastore: ~flow.models.DatastoreSetting
    :ivar default_cloud_priority:
    :vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
    :ivar extended_properties: This is a dictionary.
    :vartype extended_properties: dict[str, str]
    :ivar parent_sub_graph_module_ids:
    :vartype parent_sub_graph_module_ids: list[str]
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Maps each Python attribute to its REST wire key and msrest serialization
    # type ('[X]' = list of X, '{X}' = dict of str -> X, 'iso-8601' = datetime).
    # These keys/types define the JSON payload shape -- do not edit by hand.
    _attribute_map = {
        'graph_module_dtos': {'key': 'graphModuleDtos', 'type': '[ModuleDto]'},
        'graph_data_sources': {'key': 'graphDataSources', 'type': '[DataInfo]'},
        'graphs': {'key': 'graphs', 'type': '{PipelineGraph}'},
        'graph_drafts': {'key': 'graphDrafts', 'type': '{PipelineGraph}'},
        'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
        'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
        'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
        'referenced_node_id': {'key': 'referencedNodeId', 'type': 'str'},
        'pipeline_run_setting_parameters': {'key': 'pipelineRunSettingParameters', 'type': '[RunSettingParameter]'},
        'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
        'real_time_endpoint_info': {'key': 'realTimeEndpointInfo', 'type': 'RealTimeEndpointInfo'},
        'node_telemetry_meta_infos': {'key': 'nodeTelemetryMetaInfos', 'type': '[NodeTelemetryMetaInfo]'},
        'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
        'module_nodes': {'key': 'moduleNodes', 'type': '[GraphModuleNode]'},
        'dataset_nodes': {'key': 'datasetNodes', 'type': '[GraphDatasetNode]'},
        'sub_graph_nodes': {'key': 'subGraphNodes', 'type': '[GraphReferenceNode]'},
        'control_reference_nodes': {'key': 'controlReferenceNodes', 'type': '[GraphControlReferenceNode]'},
        'control_nodes': {'key': 'controlNodes', 'type': '[GraphControlNode]'},
        'edges': {'key': 'edges', 'type': '[GraphEdge]'},
        'entity_interface': {'key': 'entityInterface', 'type': 'EntityInterface'},
        'graph_layout': {'key': 'graphLayout', 'type': 'GraphLayout'},
        'created_by': {'key': 'createdBy', 'type': 'CreatedBy'},
        'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'CreatedBy'},
        'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
        'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
        'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
        'extended_properties': {'key': 'extendedProperties', 'type': '{str}'},
        'parent_sub_graph_module_ids': {'key': 'parentSubGraphModuleIds', 'type': '[str]'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword graph_module_dtos:
        :paramtype graph_module_dtos: list[~flow.models.ModuleDto]
        :keyword graph_data_sources:
        :paramtype graph_data_sources: list[~flow.models.DataInfo]
        :keyword graphs: This is a dictionary.
        :paramtype graphs: dict[str, ~flow.models.PipelineGraph]
        :keyword graph_drafts: This is a dictionary.
        :paramtype graph_drafts: dict[str, ~flow.models.PipelineGraph]
        :keyword module_node_run_settings:
        :paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
        :keyword module_node_ui_input_settings:
        :paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
        :keyword sub_pipelines_info:
        :paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
        :keyword referenced_node_id:
        :paramtype referenced_node_id: str
        :keyword pipeline_run_setting_parameters:
        :paramtype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
        :keyword pipeline_run_settings:
        :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
        :keyword real_time_endpoint_info:
        :paramtype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
        :keyword node_telemetry_meta_infos:
        :paramtype node_telemetry_meta_infos: list[~flow.models.NodeTelemetryMetaInfo]
        :keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
         "ContainsDesignerBuildin".
        :paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
        :keyword module_nodes:
        :paramtype module_nodes: list[~flow.models.GraphModuleNode]
        :keyword dataset_nodes:
        :paramtype dataset_nodes: list[~flow.models.GraphDatasetNode]
        :keyword sub_graph_nodes:
        :paramtype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
        :keyword control_reference_nodes:
        :paramtype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
        :keyword control_nodes:
        :paramtype control_nodes: list[~flow.models.GraphControlNode]
        :keyword edges:
        :paramtype edges: list[~flow.models.GraphEdge]
        :keyword entity_interface:
        :paramtype entity_interface: ~flow.models.EntityInterface
        :keyword graph_layout:
        :paramtype graph_layout: ~flow.models.GraphLayout
        :keyword created_by:
        :paramtype created_by: ~flow.models.CreatedBy
        :keyword last_updated_by:
        :paramtype last_updated_by: ~flow.models.CreatedBy
        :keyword default_compute:
        :paramtype default_compute: ~flow.models.ComputeSetting
        :keyword default_datastore:
        :paramtype default_datastore: ~flow.models.DatastoreSetting
        :keyword default_cloud_priority:
        :paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
        :keyword extended_properties: This is a dictionary.
        :paramtype extended_properties: dict[str, str]
        :keyword parent_sub_graph_module_ids:
        :paramtype parent_sub_graph_module_ids: list[str]
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PipelineGraph, self).__init__(**kwargs)
        # All fields are optional keyword arguments; missing ones become None.
        self.graph_module_dtos = kwargs.get('graph_module_dtos', None)
        self.graph_data_sources = kwargs.get('graph_data_sources', None)
        self.graphs = kwargs.get('graphs', None)
        self.graph_drafts = kwargs.get('graph_drafts', None)
        self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
        self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
        self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
        self.referenced_node_id = kwargs.get('referenced_node_id', None)
        self.pipeline_run_setting_parameters = kwargs.get('pipeline_run_setting_parameters', None)
        self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
        self.real_time_endpoint_info = kwargs.get('real_time_endpoint_info', None)
        self.node_telemetry_meta_infos = kwargs.get('node_telemetry_meta_infos', None)
        self.graph_components_mode = kwargs.get('graph_components_mode', None)
        self.module_nodes = kwargs.get('module_nodes', None)
        self.dataset_nodes = kwargs.get('dataset_nodes', None)
        self.sub_graph_nodes = kwargs.get('sub_graph_nodes', None)
        self.control_reference_nodes = kwargs.get('control_reference_nodes', None)
        self.control_nodes = kwargs.get('control_nodes', None)
        self.edges = kwargs.get('edges', None)
        self.entity_interface = kwargs.get('entity_interface', None)
        self.graph_layout = kwargs.get('graph_layout', None)
        self.created_by = kwargs.get('created_by', None)
        self.last_updated_by = kwargs.get('last_updated_by', None)
        self.default_compute = kwargs.get('default_compute', None)
        self.default_datastore = kwargs.get('default_datastore', None)
        self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
        self.extended_properties = kwargs.get('extended_properties', None)
        self.parent_sub_graph_module_ids = kwargs.get('parent_sub_graph_module_ids', None)
        self.id = kwargs.get('id', None)
        self.etag = kwargs.get('etag', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)


# Auto-generated msrest model: thin wrapper around a single 'data' payload.
class PipelineInput(msrest.serialization.Model):
    """PipelineInput.

    :ivar data:
    :vartype data: ~flow.models.InputData
    """

    _attribute_map = {
        'data': {'key': 'data', 'type': 'InputData'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data:
        :paramtype data: ~flow.models.InputData
        """
        super(PipelineInput, self).__init__(**kwargs)
        self.data = kwargs.get('data', None)


# Auto-generated msrest model describing a pipeline job submission/state.
class PipelineJob(msrest.serialization.Model):
    """PipelineJob.

    :ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
     "AutoML", "Spark", "Base".
    :vartype job_type: str or ~flow.models.JobType
    :ivar pipeline_job_type: The only acceptable values to pass in are None and "AzureML". The
     default value is None.
    :vartype pipeline_job_type: str
    :ivar pipeline:
    :vartype pipeline: ~flow.models.Pipeline
    :ivar compute_id:
    :vartype compute_id: str
    :ivar run_id:
    :vartype run_id: str
    :ivar settings: Anything.
    :vartype settings: any
    :ivar component_jobs: This is a dictionary.
    :vartype component_jobs: dict[str, ~flow.models.MfeInternalV20211001ComponentJob]
    :ivar inputs: This is a dictionary.
    :vartype inputs: dict[str, ~flow.models.JobInput]
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, ~flow.models.JobOutput]
    :ivar bindings:
    :vartype bindings: list[~flow.models.Binding]
    :ivar jobs: This is a dictionary.
    :vartype jobs: dict[str, any]
    :ivar input_bindings: This is a dictionary.
    :vartype input_bindings: dict[str, ~flow.models.InputDataBinding]
    :ivar output_bindings: This is a dictionary.
    :vartype output_bindings: dict[str, ~flow.models.OutputDataBinding]
    :ivar source_job_id:
    :vartype source_job_id: str
    :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
     "InProgress".
    :vartype provisioning_state: str or ~flow.models.JobProvisioningState
    :ivar parent_job_name:
    :vartype parent_job_name: str
    :ivar display_name:
    :vartype display_name: str
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing",
     "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled",
     "NotResponding", "Paused", "Unknown", "Scheduled".
    :vartype status: str or ~flow.models.JobStatus
    :ivar interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
    :vartype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
    :ivar identity:
    :vartype identity: ~flow.models.MfeInternalIdentityConfiguration
    :ivar compute:
    :vartype compute: ~flow.models.ComputeConfiguration
    :ivar priority:
    :vartype priority: int
    :ivar output:
    :vartype output: ~flow.models.JobOutputArtifacts
    :ivar is_archived:
    :vartype is_archived: bool
    :ivar schedule:
    :vartype schedule: ~flow.models.ScheduleBase
    :ivar component_id:
    :vartype component_id: str
    :ivar notification_setting:
    :vartype notification_setting: ~flow.models.NotificationSetting
    :ivar secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
    :vartype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    """

    # Wire-format mapping; 'object' entries are passed through untyped.
    _attribute_map = {
        'job_type': {'key': 'jobType', 'type': 'str'},
        'pipeline_job_type': {'key': 'pipelineJobType', 'type': 'str'},
        'pipeline': {'key': 'pipeline', 'type': 'Pipeline'},
        'compute_id': {'key': 'computeId', 'type': 'str'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'settings': {'key': 'settings', 'type': 'object'},
        'component_jobs': {'key': 'componentJobs', 'type': '{MfeInternalV20211001ComponentJob}'},
        'inputs': {'key': 'inputs', 'type': '{JobInput}'},
        'outputs': {'key': 'outputs', 'type': '{JobOutput}'},
        'bindings': {'key': 'bindings', 'type': '[Binding]'},
        'jobs': {'key': 'jobs', 'type': '{object}'},
        'input_bindings': {'key': 'inputBindings', 'type': '{InputDataBinding}'},
        'output_bindings': {'key': 'outputBindings', 'type': '{OutputDataBinding}'},
        'source_job_id': {'key': 'sourceJobId', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'parent_job_name': {'key': 'parentJobName', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
        'identity': {'key': 'identity', 'type': 'MfeInternalIdentityConfiguration'},
        'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
        'priority': {'key': 'priority', 'type': 'int'},
        'output': {'key': 'output', 'type': 'JobOutputArtifacts'},
        'is_archived': {'key': 'isArchived', 'type': 'bool'},
        'schedule': {'key': 'schedule', 'type': 'ScheduleBase'},
        'component_id': {'key': 'componentId', 'type': 'str'},
        'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'},
        'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{MfeInternalSecretConfiguration}'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline",
         "Data", "AutoML", "Spark", "Base".
        :paramtype job_type: str or ~flow.models.JobType
        :keyword pipeline_job_type: The only acceptable values to pass in are None and "AzureML".
         The default value is None.
        :paramtype pipeline_job_type: str
        :keyword pipeline:
        :paramtype pipeline: ~flow.models.Pipeline
        :keyword compute_id:
        :paramtype compute_id: str
        :keyword run_id:
        :paramtype run_id: str
        :keyword settings: Anything.
        :paramtype settings: any
        :keyword component_jobs: This is a dictionary.
        :paramtype component_jobs: dict[str, ~flow.models.MfeInternalV20211001ComponentJob]
        :keyword inputs: This is a dictionary.
        :paramtype inputs: dict[str, ~flow.models.JobInput]
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, ~flow.models.JobOutput]
        :keyword bindings:
        :paramtype bindings: list[~flow.models.Binding]
        :keyword jobs: This is a dictionary.
        :paramtype jobs: dict[str, any]
        :keyword input_bindings: This is a dictionary.
        :paramtype input_bindings: dict[str, ~flow.models.InputDataBinding]
        :keyword output_bindings: This is a dictionary.
        :paramtype output_bindings: dict[str, ~flow.models.OutputDataBinding]
        :keyword source_job_id:
        :paramtype source_job_id: str
        :keyword provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
         "InProgress".
        :paramtype provisioning_state: str or ~flow.models.JobProvisioningState
        :keyword parent_job_name:
        :paramtype parent_job_name: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword status: Possible values include: "NotStarted", "Starting", "Provisioning",
         "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed",
         "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled".
        :paramtype status: str or ~flow.models.JobStatus
        :keyword interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
        :paramtype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
        :keyword identity:
        :paramtype identity: ~flow.models.MfeInternalIdentityConfiguration
        :keyword compute:
        :paramtype compute: ~flow.models.ComputeConfiguration
        :keyword priority:
        :paramtype priority: int
        :keyword output:
        :paramtype output: ~flow.models.JobOutputArtifacts
        :keyword is_archived:
        :paramtype is_archived: bool
        :keyword schedule:
        :paramtype schedule: ~flow.models.ScheduleBase
        :keyword component_id:
        :paramtype component_id: str
        :keyword notification_setting:
        :paramtype notification_setting: ~flow.models.NotificationSetting
        :keyword secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
        :paramtype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        """
        super(PipelineJob, self).__init__(**kwargs)
        # All fields are optional keyword arguments; missing ones become None.
        self.job_type = kwargs.get('job_type', None)
        self.pipeline_job_type = kwargs.get('pipeline_job_type', None)
        self.pipeline = kwargs.get('pipeline', None)
        self.compute_id = kwargs.get('compute_id', None)
        self.run_id = kwargs.get('run_id', None)
        self.settings = kwargs.get('settings', None)
        self.component_jobs = kwargs.get('component_jobs', None)
        self.inputs = kwargs.get('inputs', None)
        self.outputs = kwargs.get('outputs', None)
        self.bindings = kwargs.get('bindings', None)
        self.jobs = kwargs.get('jobs', None)
        self.input_bindings = kwargs.get('input_bindings', None)
        self.output_bindings = kwargs.get('output_bindings', None)
        self.source_job_id = kwargs.get('source_job_id', None)
        self.provisioning_state = kwargs.get('provisioning_state', None)
        self.parent_job_name = kwargs.get('parent_job_name', None)
        self.display_name = kwargs.get('display_name', None)
        self.experiment_name = kwargs.get('experiment_name', None)
        self.status = kwargs.get('status', None)
        self.interaction_endpoints = kwargs.get('interaction_endpoints', None)
        self.identity = kwargs.get('identity', None)
        self.compute = kwargs.get('compute', None)
        self.priority = kwargs.get('priority', None)
        self.output = kwargs.get('output', None)
        self.is_archived = kwargs.get('is_archived', None)
        self.schedule = kwargs.get('schedule', None)
        self.component_id = kwargs.get('component_id', None)
        self.notification_setting = kwargs.get('notification_setting', None)
        self.secrets_configuration = kwargs.get('secrets_configuration', None)
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)


# Auto-generated msrest model: run-time settings used when a schedule (or other
# trigger) submits a pipeline job.
class PipelineJobRuntimeBasicSettings(msrest.serialization.Model):
    """PipelineJobRuntimeBasicSettings.

    :ivar pipeline_run_settings:
    :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar pipeline_job_name:
    :vartype pipeline_job_name: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar display_name:
    :vartype display_name: str
    :ivar description:
    :vartype description: str
    :ivar trigger_time_string:
    :vartype trigger_time_string: str
    :ivar pipeline_parameters: This is a dictionary.
    :vartype pipeline_parameters: dict[str, str]
    :ivar data_path_assignments: This is a dictionary.
    :vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
    :ivar data_set_definition_value_assignments: This is a dictionary.
    :vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
    :ivar asset_output_settings_assignments: This is a dictionary.
    :vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
    """

    # Wire-format mapping for the settings payload.
    _attribute_map = {
        'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'trigger_time_string': {'key': 'triggerTimeString', 'type': 'str'},
        'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
        'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
        'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
        'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword pipeline_run_settings:
        :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword pipeline_job_name:
        :paramtype pipeline_job_name: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword display_name:
        :paramtype display_name: str
        :keyword description:
        :paramtype description: str
        :keyword trigger_time_string:
        :paramtype trigger_time_string: str
        :keyword pipeline_parameters: This is a dictionary.
        :paramtype pipeline_parameters: dict[str, str]
        :keyword data_path_assignments: This is a dictionary.
        :paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
        :keyword data_set_definition_value_assignments: This is a dictionary.
        :paramtype data_set_definition_value_assignments: dict[str,
         ~flow.models.DataSetDefinitionValue]
        :keyword asset_output_settings_assignments: This is a dictionary.
        :paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
        """
        super(PipelineJobRuntimeBasicSettings, self).__init__(**kwargs)
        # All fields are optional keyword arguments; missing ones become None.
        self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
        self.experiment_name = kwargs.get('experiment_name', None)
        self.pipeline_job_name = kwargs.get('pipeline_job_name', None)
        self.tags = kwargs.get('tags', None)
        self.display_name = kwargs.get('display_name', None)
        self.description = kwargs.get('description', None)
        self.trigger_time_string = kwargs.get('trigger_time_string', None)
        self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
        self.data_path_assignments = kwargs.get('data_path_assignments', None)
        self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
        self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)


# Auto-generated msrest model: a schedule (Recurrence or Cron trigger) bound to
# a pipeline job plus the runtime settings used when it fires.
class PipelineJobScheduleDto(msrest.serialization.Model):
    """PipelineJobScheduleDto.

    :ivar system_data:
    :vartype system_data: ~flow.models.SystemData
    :ivar name:
    :vartype name: str
    :ivar pipeline_job_name:
    :vartype pipeline_job_name: str
    :ivar pipeline_job_runtime_settings:
    :vartype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
    :ivar display_name:
    :vartype display_name: str
    :ivar trigger_type: Possible values include: "Recurrence", "Cron".
    :vartype trigger_type: str or ~flow.models.TriggerType
    :ivar recurrence:
    :vartype recurrence: ~flow.models.Recurrence
    :ivar cron:
    :vartype cron: ~flow.models.Cron
    :ivar status: Possible values include: "Enabled", "Disabled".
    :vartype status: str or ~flow.models.ScheduleStatus
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    """

    _attribute_map = {
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'name': {'key': 'name', 'type': 'str'},
        'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
        'pipeline_job_runtime_settings': {'key': 'pipelineJobRuntimeSettings', 'type': 'PipelineJobRuntimeBasicSettings'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'trigger_type': {'key': 'triggerType', 'type': 'str'},
        'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
        'cron': {'key': 'cron', 'type': 'Cron'},
        'status': {'key': 'status', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword system_data:
        :paramtype system_data: ~flow.models.SystemData
        :keyword name:
        :paramtype name: str
        :keyword pipeline_job_name:
        :paramtype pipeline_job_name: str
        :keyword pipeline_job_runtime_settings:
        :paramtype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
        :keyword display_name:
        :paramtype display_name: str
        :keyword trigger_type: Possible values include: "Recurrence", "Cron".
        :paramtype trigger_type: str or ~flow.models.TriggerType
        :keyword recurrence:
        :paramtype recurrence: ~flow.models.Recurrence
        :keyword cron:
        :paramtype cron: ~flow.models.Cron
        :keyword status: Possible values include: "Enabled", "Disabled".
        :paramtype status: str or ~flow.models.ScheduleStatus
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        """
        super(PipelineJobScheduleDto, self).__init__(**kwargs)
        # All fields are optional keyword arguments; missing ones become None.
        self.system_data = kwargs.get('system_data', None)
        self.name = kwargs.get('name', None)
        self.pipeline_job_name = kwargs.get('pipeline_job_name', None)
        self.pipeline_job_runtime_settings = kwargs.get('pipeline_job_runtime_settings', None)
        self.display_name = kwargs.get('display_name', None)
        self.trigger_type = kwargs.get('trigger_type', None)
        self.recurrence = kwargs.get('recurrence', None)
        self.cron = kwargs.get('cron', None)
        self.status = kwargs.get('status', None)
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)


# Auto-generated msrest model: thin wrapper around a single 'data' payload.
class PipelineOutput(msrest.serialization.Model):
    """PipelineOutput.

    :ivar data:
    :vartype data: ~flow.models.MfeInternalOutputData
    """

    _attribute_map = {
        'data': {'key': 'data', 'type': 'MfeInternalOutputData'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data:
        :paramtype data: ~flow.models.MfeInternalOutputData
        """
        super(PipelineOutput, self).__init__(**kwargs)
        self.data = kwargs.get('data', None)


# Auto-generated msrest model describing the full state of a pipeline run.
class PipelineRun(msrest.serialization.Model):
    """PipelineRun.

    :ivar pipeline_id:
    :vartype pipeline_id: str
    :ivar run_source:
    :vartype run_source: str
    :ivar run_type: Possible values include: "HTTP", "SDK", "Schedule", "Portal".
    :vartype run_type: str or ~flow.models.RunType
    :ivar parameters: This is a dictionary.
    :vartype parameters: dict[str, str]
    :ivar data_path_assignments: This is a dictionary.
    :vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
    :ivar data_set_definition_value_assignment: This is a dictionary.
    :vartype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
    :ivar asset_output_settings_assignments: This is a dictionary.
    :vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
    :ivar total_steps:
    :vartype total_steps: int
    :ivar logs: This is a dictionary.
    :vartype logs: dict[str, str]
    :ivar user_alias:
    :vartype user_alias: str
    :ivar enforce_rerun:
    :vartype enforce_rerun: bool
    :ivar continue_run_on_failed_optional_input:
    :vartype continue_run_on_failed_optional_input: bool
    :ivar default_compute:
    :vartype default_compute: ~flow.models.ComputeSetting
    :ivar default_datastore:
    :vartype default_datastore: ~flow.models.DatastoreSetting
    :ivar default_cloud_priority:
    :vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
    :ivar pipeline_timeout_seconds:
    :vartype pipeline_timeout_seconds: int
    :ivar continue_run_on_step_failure:
    :vartype continue_run_on_step_failure: bool
    :ivar identity_config:
    :vartype identity_config: ~flow.models.IdentitySetting
    :ivar description:
    :vartype description: str
    :ivar display_name:
    :vartype display_name: str
    :ivar run_number:
    :vartype run_number: int
    :ivar status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
     "Failed", "Finished", "Canceled", "Throttled", "Unknown".
    :vartype status_code: str or ~flow.models.PipelineStatusCode
    :ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
     "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
     "Failed", "Canceled".
    :vartype run_status: str or ~flow.models.RunStatus
    :ivar status_detail:
    :vartype status_detail: str
    :ivar start_time:
    :vartype start_time: ~datetime.datetime
    :ivar end_time:
    :vartype end_time: ~datetime.datetime
    :ivar graph_id:
    :vartype graph_id: str
    :ivar experiment_id:
    :vartype experiment_id: str
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar is_experiment_archived:
    :vartype is_experiment_archived: bool
    :ivar submitted_by:
    :vartype submitted_by: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar step_tags: This is a dictionary.
    :vartype step_tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar aether_start_time:
    :vartype aether_start_time: ~datetime.datetime
    :ivar aether_end_time:
    :vartype aether_end_time: ~datetime.datetime
    :ivar run_history_start_time:
    :vartype run_history_start_time: ~datetime.datetime
    :ivar run_history_end_time:
    :vartype run_history_end_time: ~datetime.datetime
    :ivar unique_child_run_compute_targets:
    :vartype unique_child_run_compute_targets: list[str]
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # msrest validation rules: compute-target list must contain no duplicates.
    _validation = {
        'unique_child_run_compute_targets': {'unique': True},
    }

    # Wire-format mapping ('iso-8601' entries are serialized as ISO datetimes).
    _attribute_map = {
        'pipeline_id': {'key': 'pipelineId', 'type': 'str'},
        'run_source': {'key': 'runSource', 'type': 'str'},
        'run_type': {'key': 'runType', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{str}'},
        'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
        'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': '{DataSetDefinitionValue}'},
        'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
        'total_steps': {'key': 'totalSteps', 'type': 'int'},
        'logs': {'key': 'logs', 'type': '{str}'},
        'user_alias': {'key': 'userAlias', 'type': 'str'},
        'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
        'continue_run_on_failed_optional_input': {'key': 'continueRunOnFailedOptionalInput', 'type': 'bool'},
        'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
        'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
        'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
        'pipeline_timeout_seconds': {'key': 'pipelineTimeoutSeconds', 'type': 'int'},
        'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
        'identity_config': {'key': 'identityConfig', 'type': 'IdentitySetting'},
        'description': {'key': 'description', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'run_number': {'key': 'runNumber', 'type': 'int'},
        'status_code': {'key': 'statusCode', 'type': 'str'},
        'run_status': {'key': 'runStatus', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'graph_id': {'key': 'graphId', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
        'submitted_by': {'key': 'submittedBy', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'step_tags': {'key': 'stepTags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'aether_start_time': {'key': 'aetherStartTime', 'type': 'iso-8601'},
        'aether_end_time': {'key': 'aetherEndTime', 'type': 'iso-8601'},
        'run_history_start_time': {'key': 'runHistoryStartTime', 'type': 'iso-8601'},
        'run_history_end_time': {'key': 'runHistoryEndTime', 'type': 'iso-8601'},
        'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword pipeline_id:
        :paramtype pipeline_id: str
        :keyword run_source:
        :paramtype run_source: str
        :keyword run_type: Possible values include: "HTTP", "SDK", "Schedule", "Portal".
        :paramtype run_type: str or ~flow.models.RunType
        :keyword parameters: This is a dictionary.
        :paramtype parameters: dict[str, str]
        :keyword data_path_assignments: This is a dictionary.
        :paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
        :keyword data_set_definition_value_assignment: This is a dictionary.
        :paramtype data_set_definition_value_assignment: dict[str,
         ~flow.models.DataSetDefinitionValue]
        :keyword asset_output_settings_assignments: This is a dictionary.
        :paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
        :keyword total_steps:
        :paramtype total_steps: int
        :keyword logs: This is a dictionary.
        :paramtype logs: dict[str, str]
        :keyword user_alias:
        :paramtype user_alias: str
        :keyword enforce_rerun:
        :paramtype enforce_rerun: bool
        :keyword continue_run_on_failed_optional_input:
        :paramtype continue_run_on_failed_optional_input: bool
        :keyword default_compute:
        :paramtype default_compute: ~flow.models.ComputeSetting
        :keyword default_datastore:
        :paramtype default_datastore: ~flow.models.DatastoreSetting
        :keyword default_cloud_priority:
        :paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
        :keyword pipeline_timeout_seconds:
        :paramtype pipeline_timeout_seconds: int
        :keyword continue_run_on_step_failure:
        :paramtype continue_run_on_step_failure: bool
        :keyword identity_config:
        :paramtype identity_config: ~flow.models.IdentitySetting
        :keyword description:
        :paramtype description: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword run_number:
        :paramtype run_number: int
        :keyword status_code: Possible values include: "NotStarted", "InDraft", "Preparing",
         "Running", "Failed", "Finished", "Canceled", "Throttled", "Unknown".
        :paramtype status_code: str or ~flow.models.PipelineStatusCode
        :keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing",
         "Paused", "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested",
         "Completed", "Failed", "Canceled".
        :paramtype run_status: str or ~flow.models.RunStatus
        :keyword status_detail:
        :paramtype status_detail: str
        :keyword start_time:
        :paramtype start_time: ~datetime.datetime
        :keyword end_time:
        :paramtype end_time: ~datetime.datetime
        :keyword graph_id:
        :paramtype graph_id: str
        :keyword experiment_id:
        :paramtype experiment_id: str
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword is_experiment_archived:
        :paramtype is_experiment_archived: bool
        :keyword submitted_by:
        :paramtype submitted_by: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword step_tags: This is a dictionary.
        :paramtype step_tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword aether_start_time:
        :paramtype aether_start_time: ~datetime.datetime
        :keyword aether_end_time:
        :paramtype aether_end_time: ~datetime.datetime
        :keyword run_history_start_time:
        :paramtype run_history_start_time: ~datetime.datetime
        :keyword run_history_end_time:
        :paramtype run_history_end_time: ~datetime.datetime
        :keyword unique_child_run_compute_targets:
        :paramtype unique_child_run_compute_targets: list[str]
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus :keyword id: :paramtype id: str :keyword etag: :paramtype etag: str :keyword created_date: :paramtype created_date: ~datetime.datetime :keyword last_modified_date: :paramtype last_modified_date: ~datetime.datetime """ super(PipelineRun, self).__init__(**kwargs) self.pipeline_id = kwargs.get('pipeline_id', None) self.run_source = kwargs.get('run_source', None) self.run_type = kwargs.get('run_type', None) self.parameters = kwargs.get('parameters', None) self.data_path_assignments = kwargs.get('data_path_assignments', None) self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None) self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None) self.total_steps = kwargs.get('total_steps', None) self.logs = kwargs.get('logs', None) self.user_alias = kwargs.get('user_alias', None) self.enforce_rerun = kwargs.get('enforce_rerun', None) self.continue_run_on_failed_optional_input = kwargs.get('continue_run_on_failed_optional_input', None) self.default_compute = kwargs.get('default_compute', None) self.default_datastore = kwargs.get('default_datastore', None) self.default_cloud_priority = kwargs.get('default_cloud_priority', None) self.pipeline_timeout_seconds = kwargs.get('pipeline_timeout_seconds', None) self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None) self.identity_config = kwargs.get('identity_config', None) self.description = kwargs.get('description', None) self.display_name = kwargs.get('display_name', None) self.run_number = kwargs.get('run_number', None) self.status_code = kwargs.get('status_code', None) self.run_status = kwargs.get('run_status', None) self.status_detail = kwargs.get('status_detail', None) self.start_time = kwargs.get('start_time', None) self.end_time = kwargs.get('end_time', None) self.graph_id = kwargs.get('graph_id', None) self.experiment_id = kwargs.get('experiment_id', None) 
        # Tail of the enclosing run model's __init__ (its class statement is
        # above this chunk): copy the remaining optional kwargs onto the instance.
        self.experiment_name = kwargs.get('experiment_name', None)
        self.is_experiment_archived = kwargs.get('is_experiment_archived', None)
        self.submitted_by = kwargs.get('submitted_by', None)
        self.tags = kwargs.get('tags', None)
        self.step_tags = kwargs.get('step_tags', None)
        self.properties = kwargs.get('properties', None)
        self.aether_start_time = kwargs.get('aether_start_time', None)
        self.aether_end_time = kwargs.get('aether_end_time', None)
        self.run_history_start_time = kwargs.get('run_history_start_time', None)
        self.run_history_end_time = kwargs.get('run_history_end_time', None)
        self.unique_child_run_compute_targets = kwargs.get('unique_child_run_compute_targets', None)
        self.entity_status = kwargs.get('entity_status', None)
        self.id = kwargs.get('id', None)
        self.etag = kwargs.get('etag', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)


# NOTE: autorest-generated model (see file header) — edits are lost on regeneration.
class PipelineRunGraphDetail(msrest.serialization.Model):
    """PipelineRunGraphDetail.

    :ivar graph:
    :vartype graph: ~flow.models.PipelineGraph
    :ivar graph_nodes_status: This is a dictionary.
    :vartype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
    """

    # Attribute name -> REST wire key and msrest (de)serialization type.
    # '{T}' denotes dict[str, T]; '[T]' denotes list[T].
    _attribute_map = {
        'graph': {'key': 'graph', 'type': 'PipelineGraph'},
        'graph_nodes_status': {'key': 'graphNodesStatus', 'type': '{GraphNodeStatusInfo}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword graph:
        :paramtype graph: ~flow.models.PipelineGraph
        :keyword graph_nodes_status: This is a dictionary.
        :paramtype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
        """
        super(PipelineRunGraphDetail, self).__init__(**kwargs)
        self.graph = kwargs.get('graph', None)
        self.graph_nodes_status = kwargs.get('graph_nodes_status', None)


class PipelineRunGraphStatus(msrest.serialization.Model):
    """PipelineRunGraphStatus.

    :ivar status:
    :vartype status: ~flow.models.PipelineStatus
    :ivar graph_nodes_status: This is a dictionary.
    :vartype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
    :ivar experiment_id:
    :vartype experiment_id: str
    :ivar is_experiment_archived:
    :vartype is_experiment_archived: bool
    """

    _attribute_map = {
        'status': {'key': 'status', 'type': 'PipelineStatus'},
        'graph_nodes_status': {'key': 'graphNodesStatus', 'type': '{GraphNodeStatusInfo}'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword status:
        :paramtype status: ~flow.models.PipelineStatus
        :keyword graph_nodes_status: This is a dictionary.
        :paramtype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
        :keyword experiment_id:
        :paramtype experiment_id: str
        :keyword is_experiment_archived:
        :paramtype is_experiment_archived: bool
        """
        super(PipelineRunGraphStatus, self).__init__(**kwargs)
        self.status = kwargs.get('status', None)
        self.graph_nodes_status = kwargs.get('graph_nodes_status', None)
        self.experiment_id = kwargs.get('experiment_id', None)
        self.is_experiment_archived = kwargs.get('is_experiment_archived', None)


class PipelineRunProfile(msrest.serialization.Model):
    """PipelineRunProfile.

    :ivar run_id:
    :vartype run_id: str
    :ivar node_id:
    :vartype node_id: str
    :ivar run_url:
    :vartype run_url: str
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar experiment_id:
    :vartype experiment_id: str
    :ivar description:
    :vartype description: str
    :ivar status:
    :vartype status: ~flow.models.PipelineRunStatus
    :ivar create_time:
    :vartype create_time: long
    :ivar start_time:
    :vartype start_time: long
    :ivar end_time:
    :vartype end_time: long
    :ivar profiling_time:
    :vartype profiling_time: long
    :ivar step_runs_profile:
    :vartype step_runs_profile: list[~flow.models.StepRunProfile]
    :ivar sub_pipeline_run_profile:
    :vartype sub_pipeline_run_profile: list[~flow.models.PipelineRunProfile]
    """

    # Note: the timestamps here are wire type 'long' (epoch-style numbers),
    # unlike the 'iso-8601' datetimes used by the sibling status models.
    _attribute_map = {
        'run_id': {'key': 'runId', 'type': 'str'},
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'run_url': {'key': 'runUrl', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'status': {'key': 'status', 'type': 'PipelineRunStatus'},
        'create_time': {'key': 'createTime', 'type': 'long'},
        'start_time': {'key': 'startTime', 'type': 'long'},
        'end_time': {'key': 'endTime', 'type': 'long'},
        'profiling_time': {'key': 'profilingTime', 'type': 'long'},
        'step_runs_profile': {'key': 'stepRunsProfile', 'type': '[StepRunProfile]'},
        'sub_pipeline_run_profile': {'key': 'subPipelineRunProfile', 'type': '[PipelineRunProfile]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword run_id:
        :paramtype run_id: str
        :keyword node_id:
        :paramtype node_id: str
        :keyword run_url:
        :paramtype run_url: str
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword experiment_id:
        :paramtype experiment_id: str
        :keyword description:
        :paramtype description: str
        :keyword status:
        :paramtype status: ~flow.models.PipelineRunStatus
        :keyword create_time:
        :paramtype create_time: long
        :keyword start_time:
        :paramtype start_time: long
        :keyword end_time:
        :paramtype end_time: long
        :keyword profiling_time:
        :paramtype profiling_time: long
        :keyword step_runs_profile:
        :paramtype step_runs_profile: list[~flow.models.StepRunProfile]
        :keyword sub_pipeline_run_profile:
        :paramtype sub_pipeline_run_profile: list[~flow.models.PipelineRunProfile]
        """
        super(PipelineRunProfile, self).__init__(**kwargs)
        self.run_id = kwargs.get('run_id', None)
        self.node_id = kwargs.get('node_id', None)
        self.run_url = kwargs.get('run_url', None)
        self.experiment_name = kwargs.get('experiment_name', None)
        self.experiment_id = kwargs.get('experiment_id', None)
        self.description = kwargs.get('description', None)
        self.status = kwargs.get('status', None)
        self.create_time = kwargs.get('create_time', None)
        self.start_time = kwargs.get('start_time', None)
        self.end_time = kwargs.get('end_time', None)
        self.profiling_time = kwargs.get('profiling_time', None)
        self.step_runs_profile = kwargs.get('step_runs_profile', None)
        self.sub_pipeline_run_profile = kwargs.get('sub_pipeline_run_profile', None)


class PipelineRunStatus(msrest.serialization.Model):
    """PipelineRunStatus.

    :ivar status_code: Possible values include: "NotStarted", "Running", "Failed", "Finished",
     "Canceled", "Queued", "CancelRequested".
    :vartype status_code: str or ~flow.models.PipelineRunStatusCode
    :ivar status_detail:
    :vartype status_detail: str
    :ivar creation_time:
    :vartype creation_time: ~datetime.datetime
    :ivar end_time:
    :vartype end_time: ~datetime.datetime
    """

    _attribute_map = {
        'status_code': {'key': 'statusCode', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
        'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword status_code: Possible values include: "NotStarted", "Running", "Failed",
         "Finished", "Canceled", "Queued", "CancelRequested".
        :paramtype status_code: str or ~flow.models.PipelineRunStatusCode
        :keyword status_detail:
        :paramtype status_detail: str
        :keyword creation_time:
        :paramtype creation_time: ~datetime.datetime
        :keyword end_time:
        :paramtype end_time: ~datetime.datetime
        """
        super(PipelineRunStatus, self).__init__(**kwargs)
        self.status_code = kwargs.get('status_code', None)
        self.status_detail = kwargs.get('status_detail', None)
        self.creation_time = kwargs.get('creation_time', None)
        self.end_time = kwargs.get('end_time', None)


# NOTE: autorest-generated model (see file header) — edits are lost on regeneration.
class PipelineRunStepDetails(msrest.serialization.Model):
    """PipelineRunStepDetails.

    :ivar run_id:
    :vartype run_id: str
    :ivar target:
    :vartype target: str
    :ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
     "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
     "Failed", "Canceled".
    :vartype status: str or ~flow.models.RunStatus
    :ivar status_detail:
    :vartype status_detail: str
    :ivar parent_run_id:
    :vartype parent_run_id: str
    :ivar start_time:
    :vartype start_time: ~datetime.datetime
    :ivar end_time:
    :vartype end_time: ~datetime.datetime
    :ivar is_reused:
    :vartype is_reused: bool
    :ivar logs: This is a dictionary.
    :vartype logs: dict[str, str]
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, str]
    :ivar snapshot_info:
    :vartype snapshot_info: ~flow.models.SnapshotInfo
    :ivar input_datasets:
    :vartype input_datasets: list[~flow.models.DatasetLineage]
    :ivar output_datasets:
    :vartype output_datasets: list[~flow.models.OutputDatasetLineage]
    """

    # msrest validation constraints: these list attributes must not contain duplicates.
    _validation = {
        'input_datasets': {'unique': True},
        'output_datasets': {'unique': True},
    }

    # Attribute name -> REST wire key and msrest (de)serialization type.
    _attribute_map = {
        'run_id': {'key': 'runId', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'is_reused': {'key': 'isReused', 'type': 'bool'},
        'logs': {'key': 'logs', 'type': '{str}'},
        'outputs': {'key': 'outputs', 'type': '{str}'},
        'snapshot_info': {'key': 'snapshotInfo', 'type': 'SnapshotInfo'},
        'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
        'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword run_id:
        :paramtype run_id: str
        :keyword target:
        :paramtype target: str
        :keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
         "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
         "Failed", "Canceled".
        :paramtype status: str or ~flow.models.RunStatus
        :keyword status_detail:
        :paramtype status_detail: str
        :keyword parent_run_id:
        :paramtype parent_run_id: str
        :keyword start_time:
        :paramtype start_time: ~datetime.datetime
        :keyword end_time:
        :paramtype end_time: ~datetime.datetime
        :keyword is_reused:
        :paramtype is_reused: bool
        :keyword logs: This is a dictionary.
        :paramtype logs: dict[str, str]
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, str]
        :keyword snapshot_info:
        :paramtype snapshot_info: ~flow.models.SnapshotInfo
        :keyword input_datasets:
        :paramtype input_datasets: list[~flow.models.DatasetLineage]
        :keyword output_datasets:
        :paramtype output_datasets: list[~flow.models.OutputDatasetLineage]
        """
        super(PipelineRunStepDetails, self).__init__(**kwargs)
        self.run_id = kwargs.get('run_id', None)
        self.target = kwargs.get('target', None)
        self.status = kwargs.get('status', None)
        self.status_detail = kwargs.get('status_detail', None)
        self.parent_run_id = kwargs.get('parent_run_id', None)
        self.start_time = kwargs.get('start_time', None)
        self.end_time = kwargs.get('end_time', None)
        self.is_reused = kwargs.get('is_reused', None)
        self.logs = kwargs.get('logs', None)
        self.outputs = kwargs.get('outputs', None)
        self.snapshot_info = kwargs.get('snapshot_info', None)
        self.input_datasets = kwargs.get('input_datasets', None)
        self.output_datasets = kwargs.get('output_datasets', None)


class PipelineRunSummary(msrest.serialization.Model):
    """PipelineRunSummary.

    :ivar description:
    :vartype description: str
    :ivar display_name:
    :vartype display_name: str
    :ivar run_number:
    :vartype run_number: int
    :ivar status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
     "Failed", "Finished", "Canceled", "Throttled", "Unknown".
    :vartype status_code: str or ~flow.models.PipelineStatusCode
    :ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
     "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
     "Failed", "Canceled".
    :vartype run_status: str or ~flow.models.RunStatus
    :ivar status_detail:
    :vartype status_detail: str
    :ivar start_time:
    :vartype start_time: ~datetime.datetime
    :ivar end_time:
    :vartype end_time: ~datetime.datetime
    :ivar graph_id:
    :vartype graph_id: str
    :ivar experiment_id:
    :vartype experiment_id: str
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar is_experiment_archived:
    :vartype is_experiment_archived: bool
    :ivar submitted_by:
    :vartype submitted_by: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar step_tags: This is a dictionary.
    :vartype step_tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar aether_start_time:
    :vartype aether_start_time: ~datetime.datetime
    :ivar aether_end_time:
    :vartype aether_end_time: ~datetime.datetime
    :ivar run_history_start_time:
    :vartype run_history_start_time: ~datetime.datetime
    :ivar run_history_end_time:
    :vartype run_history_end_time: ~datetime.datetime
    :ivar unique_child_run_compute_targets:
    :vartype unique_child_run_compute_targets: list[str]
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # msrest validation constraint: list elements must not contain duplicates.
    _validation = {
        'unique_child_run_compute_targets': {'unique': True},
    }

    # Attribute name -> REST wire key and msrest (de)serialization type.
    _attribute_map = {
        'description': {'key': 'description', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'run_number': {'key': 'runNumber', 'type': 'int'},
        'status_code': {'key': 'statusCode', 'type': 'str'},
        'run_status': {'key': 'runStatus', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'graph_id': {'key': 'graphId', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
        'submitted_by': {'key': 'submittedBy', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'step_tags': {'key': 'stepTags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'aether_start_time': {'key': 'aetherStartTime', 'type': 'iso-8601'},
        'aether_end_time': {'key': 'aetherEndTime', 'type': 'iso-8601'},
        'run_history_start_time': {'key': 'runHistoryStartTime', 'type': 'iso-8601'},
        'run_history_end_time': {'key': 'runHistoryEndTime', 'type': 'iso-8601'},
        'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword description:
        :paramtype description: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword run_number:
        :paramtype run_number: int
        :keyword status_code: Possible values include: "NotStarted", "InDraft", "Preparing",
         "Running", "Failed", "Finished", "Canceled", "Throttled", "Unknown".
        :paramtype status_code: str or ~flow.models.PipelineStatusCode
        :keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing",
         "Paused", "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested",
         "Completed", "Failed", "Canceled".
        :paramtype run_status: str or ~flow.models.RunStatus
        :keyword status_detail:
        :paramtype status_detail: str
        :keyword start_time:
        :paramtype start_time: ~datetime.datetime
        :keyword end_time:
        :paramtype end_time: ~datetime.datetime
        :keyword graph_id:
        :paramtype graph_id: str
        :keyword experiment_id:
        :paramtype experiment_id: str
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword is_experiment_archived:
        :paramtype is_experiment_archived: bool
        :keyword submitted_by:
        :paramtype submitted_by: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword step_tags: This is a dictionary.
        :paramtype step_tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword aether_start_time:
        :paramtype aether_start_time: ~datetime.datetime
        :keyword aether_end_time:
        :paramtype aether_end_time: ~datetime.datetime
        :keyword run_history_start_time:
        :paramtype run_history_start_time: ~datetime.datetime
        :keyword run_history_end_time:
        :paramtype run_history_end_time: ~datetime.datetime
        :keyword unique_child_run_compute_targets:
        :paramtype unique_child_run_compute_targets: list[str]
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PipelineRunSummary, self).__init__(**kwargs)
        self.description = kwargs.get('description', None)
        self.display_name = kwargs.get('display_name', None)
        self.run_number = kwargs.get('run_number', None)
        self.status_code = kwargs.get('status_code', None)
        self.run_status = kwargs.get('run_status', None)
        self.status_detail = kwargs.get('status_detail', None)
        self.start_time = kwargs.get('start_time', None)
        self.end_time = kwargs.get('end_time', None)
        self.graph_id = kwargs.get('graph_id', None)
        self.experiment_id = kwargs.get('experiment_id', None)
        self.experiment_name = kwargs.get('experiment_name', None)
        self.is_experiment_archived = kwargs.get('is_experiment_archived', None)
        self.submitted_by = kwargs.get('submitted_by', None)
        self.tags = kwargs.get('tags', None)
        self.step_tags = kwargs.get('step_tags', None)
        self.properties = kwargs.get('properties', None)
        self.aether_start_time = kwargs.get('aether_start_time', None)
        self.aether_end_time = kwargs.get('aether_end_time', None)
        self.run_history_start_time = kwargs.get('run_history_start_time', None)
        self.run_history_end_time = kwargs.get('run_history_end_time', None)
        self.unique_child_run_compute_targets = kwargs.get('unique_child_run_compute_targets', None)
        self.entity_status = kwargs.get('entity_status', None)
        self.id = kwargs.get('id', None)
        self.etag = kwargs.get('etag', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)


class PipelineStatus(msrest.serialization.Model):
    """PipelineStatus.

    Variables are only populated by the server, and will be ignored when sending a request.
    :ivar status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
     "Failed", "Finished", "Canceled", "Throttled", "Unknown".
    :vartype status_code: str or ~flow.models.PipelineStatusCode
    :ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
     "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
     "Failed", "Canceled".
    :vartype run_status: str or ~flow.models.RunStatus
    :ivar status_detail:
    :vartype status_detail: str
    :ivar start_time:
    :vartype start_time: ~datetime.datetime
    :ivar end_time:
    :vartype end_time: ~datetime.datetime
    :ivar is_terminal_state:
    :vartype is_terminal_state: bool
    """

    # 'readonly': populated by the server only; client-set values are not serialized.
    _validation = {
        'is_terminal_state': {'readonly': True},
    }

    # Attribute name -> REST wire key and msrest (de)serialization type.
    _attribute_map = {
        'status_code': {'key': 'statusCode', 'type': 'str'},
        'run_status': {'key': 'runStatus', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'is_terminal_state': {'key': 'isTerminalState', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword status_code: Possible values include: "NotStarted", "InDraft", "Preparing",
         "Running", "Failed", "Finished", "Canceled", "Throttled", "Unknown".
        :paramtype status_code: str or ~flow.models.PipelineStatusCode
        :keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing",
         "Paused", "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested",
         "Completed", "Failed", "Canceled".
        :paramtype run_status: str or ~flow.models.RunStatus
        :keyword status_detail:
        :paramtype status_detail: str
        :keyword start_time:
        :paramtype start_time: ~datetime.datetime
        :keyword end_time:
        :paramtype end_time: ~datetime.datetime
        """
        super(PipelineStatus, self).__init__(**kwargs)
        self.status_code = kwargs.get('status_code', None)
        self.run_status = kwargs.get('run_status', None)
        self.status_detail = kwargs.get('status_detail', None)
        self.start_time = kwargs.get('start_time', None)
        self.end_time = kwargs.get('end_time', None)
        # Readonly attribute (see _validation): never taken from kwargs.
        self.is_terminal_state = None


# NOTE: autorest-generated model (see file header) — edits are lost on regeneration.
class PipelineStepRun(msrest.serialization.Model):
    """PipelineStepRun.

    :ivar step_name:
    :vartype step_name: str
    :ivar run_number:
    :vartype run_number: int
    :ivar run_id:
    :vartype run_id: str
    :ivar start_time:
    :vartype start_time: ~datetime.datetime
    :ivar end_time:
    :vartype end_time: ~datetime.datetime
    :ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
     "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
     "Failed", "Canceled".
    :vartype run_status: str or ~flow.models.RunStatus
    :ivar compute_target:
    :vartype compute_target: str
    :ivar compute_type:
    :vartype compute_type: str
    :ivar run_type:
    :vartype run_type: str
    :ivar step_type:
    :vartype step_type: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar is_reused:
    :vartype is_reused: bool
    :ivar display_name:
    :vartype display_name: str
    """

    _attribute_map = {
        'step_name': {'key': 'stepName', 'type': 'str'},
        'run_number': {'key': 'runNumber', 'type': 'int'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'run_status': {'key': 'runStatus', 'type': 'str'},
        'compute_target': {'key': 'computeTarget', 'type': 'str'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'run_type': {'key': 'runType', 'type': 'str'},
        'step_type': {'key': 'stepType', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'is_reused': {'key': 'isReused', 'type': 'bool'},
        'display_name': {'key': 'displayName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword step_name:
        :paramtype step_name: str
        :keyword run_number:
        :paramtype run_number: int
        :keyword run_id:
        :paramtype run_id: str
        :keyword start_time:
        :paramtype start_time: ~datetime.datetime
        :keyword end_time:
        :paramtype end_time: ~datetime.datetime
        :keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing",
         "Paused", "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested",
         "Completed", "Failed", "Canceled".
        :paramtype run_status: str or ~flow.models.RunStatus
        :keyword compute_target:
        :paramtype compute_target: str
        :keyword compute_type:
        :paramtype compute_type: str
        :keyword run_type:
        :paramtype run_type: str
        :keyword step_type:
        :paramtype step_type: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword is_reused:
        :paramtype is_reused: bool
        :keyword display_name:
        :paramtype display_name: str
        """
        super(PipelineStepRun, self).__init__(**kwargs)
        self.step_name = kwargs.get('step_name', None)
        self.run_number = kwargs.get('run_number', None)
        self.run_id = kwargs.get('run_id', None)
        self.start_time = kwargs.get('start_time', None)
        self.end_time = kwargs.get('end_time', None)
        self.run_status = kwargs.get('run_status', None)
        self.compute_target = kwargs.get('compute_target', None)
        self.compute_type = kwargs.get('compute_type', None)
        self.run_type = kwargs.get('run_type', None)
        self.step_type = kwargs.get('step_type', None)
        self.tags = kwargs.get('tags', None)
        self.is_reused = kwargs.get('is_reused', None)
        self.display_name = kwargs.get('display_name', None)


class PipelineStepRunOutputs(msrest.serialization.Model):
    """PipelineStepRunOutputs.

    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, str]
    :ivar port_outputs: This is a dictionary.
    :vartype port_outputs: dict[str, ~flow.models.PortOutputInfo]
    """

    _attribute_map = {
        'outputs': {'key': 'outputs', 'type': '{str}'},
        'port_outputs': {'key': 'portOutputs', 'type': '{PortOutputInfo}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, str]
        :keyword port_outputs: This is a dictionary.
        :paramtype port_outputs: dict[str, ~flow.models.PortOutputInfo]
        """
        super(PipelineStepRunOutputs, self).__init__(**kwargs)
        self.outputs = kwargs.get('outputs', None)
        self.port_outputs = kwargs.get('port_outputs', None)


class PipelineSubDraft(msrest.serialization.Model):
    """PipelineSubDraft.
    :ivar parent_graph_draft_id:
    :vartype parent_graph_draft_id: str
    :ivar parent_node_id:
    :vartype parent_node_id: str
    :ivar graph_detail:
    :vartype graph_detail: ~flow.models.PipelineRunGraphDetail
    :ivar module_dto:
    :vartype module_dto: ~flow.models.ModuleDto
    :ivar name:
    :vartype name: str
    :ivar last_edited_by:
    :vartype last_edited_by: str
    :ivar created_by:
    :vartype created_by: str
    :ivar description:
    :vartype description: str
    :ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
     "BatchInferencePipeline", "Unknown".
    :vartype pipeline_type: str or ~flow.models.PipelineType
    :ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
    :vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # NOTE(review): autorest-generated model — hand edits are lost on regeneration.
    # _attribute_map maps each python attribute to its REST wire name and msrest type
    # ('{str}' = dict of str, 'iso-8601' = datetime serialized as an ISO-8601 string).
    _attribute_map = {
        'parent_graph_draft_id': {'key': 'parentGraphDraftId', 'type': 'str'},
        'parent_node_id': {'key': 'parentNodeId', 'type': 'str'},
        'graph_detail': {'key': 'graphDetail', 'type': 'PipelineRunGraphDetail'},
        'module_dto': {'key': 'moduleDto', 'type': 'ModuleDto'},
        'name': {'key': 'name', 'type': 'str'},
        'last_edited_by': {'key': 'lastEditedBy', 'type': 'str'},
        'created_by': {'key': 'createdBy', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
        'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword parent_graph_draft_id:
        :paramtype parent_graph_draft_id: str
        :keyword parent_node_id:
        :paramtype parent_node_id: str
        :keyword graph_detail:
        :paramtype graph_detail: ~flow.models.PipelineRunGraphDetail
        :keyword module_dto:
        :paramtype module_dto: ~flow.models.ModuleDto
        :keyword name:
        :paramtype name: str
        :keyword last_edited_by:
        :paramtype last_edited_by: str
        :keyword created_by:
        :paramtype created_by: str
        :keyword description:
        :paramtype description: str
        :keyword pipeline_type: Possible values include: "TrainingPipeline",
         "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
        :paramtype pipeline_type: str or ~flow.models.PipelineType
        :keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
        :paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PipelineSubDraft, self).__init__(**kwargs)
        # All attributes are optional on the wire; absent kwargs default to None.
        self.parent_graph_draft_id = kwargs.get('parent_graph_draft_id', None)
        self.parent_node_id = kwargs.get('parent_node_id', None)
        self.graph_detail = kwargs.get('graph_detail', None)
        self.module_dto = kwargs.get('module_dto', None)
        self.name = kwargs.get('name', None)
        self.last_edited_by = kwargs.get('last_edited_by', None)
        self.created_by = kwargs.get('created_by', None)
        self.description = kwargs.get('description', None)
        self.pipeline_type = kwargs.get('pipeline_type', None)
        self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
        self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)
        self.entity_status = kwargs.get('entity_status', None)
        self.id = kwargs.get('id', None)
        self.etag = kwargs.get('etag', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)


class PolicyValidationResponse(msrest.serialization.Model):
    """PolicyValidationResponse.

    :ivar error_response: The error response.
    :vartype error_response: ~flow.models.ErrorResponse
    :ivar next_action_interval_in_seconds:
    :vartype next_action_interval_in_seconds: int
    :ivar action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
     "SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
    :vartype action_type: str or ~flow.models.ActionType
    """

    _attribute_map = {
        'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
        'next_action_interval_in_seconds': {'key': 'nextActionIntervalInSeconds', 'type': 'int'},
        'action_type': {'key': 'actionType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword error_response: The error response.
        :paramtype error_response: ~flow.models.ErrorResponse
        :keyword next_action_interval_in_seconds:
        :paramtype next_action_interval_in_seconds: int
        :keyword action_type: Possible values include: "SendValidationRequest",
         "GetValidationStatus", "SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
        :paramtype action_type: str or ~flow.models.ActionType
        """
        super(PolicyValidationResponse, self).__init__(**kwargs)
        self.error_response = kwargs.get('error_response', None)
        self.next_action_interval_in_seconds = kwargs.get('next_action_interval_in_seconds', None)
        self.action_type = kwargs.get('action_type', None)


class PortInfo(msrest.serialization.Model):
    """PortInfo.

    :ivar node_id:
    :vartype node_id: str
    :ivar port_name:
    :vartype port_name: str
    :ivar graph_port_name:
    :vartype graph_port_name: str
    :ivar is_parameter:
    :vartype is_parameter: bool
    :ivar web_service_port:
    :vartype web_service_port: str
    """

    _attribute_map = {
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'port_name': {'key': 'portName', 'type': 'str'},
        'graph_port_name': {'key': 'graphPortName', 'type': 'str'},
        'is_parameter': {'key': 'isParameter', 'type': 'bool'},
        'web_service_port': {'key': 'webServicePort', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword node_id:
        :paramtype node_id: str
        :keyword port_name:
        :paramtype port_name: str
        :keyword graph_port_name:
        :paramtype graph_port_name: str
        :keyword is_parameter:
        :paramtype is_parameter: bool
        :keyword web_service_port:
        :paramtype web_service_port: str
        """
        super(PortInfo, self).__init__(**kwargs)
        self.node_id = kwargs.get('node_id', None)
        self.port_name = kwargs.get('port_name', None)
        self.graph_port_name = kwargs.get('graph_port_name', None)
        self.is_parameter = kwargs.get('is_parameter', None)
        self.web_service_port = kwargs.get('web_service_port', None)


class PortOutputInfo(msrest.serialization.Model):
    """PortOutputInfo.

    :ivar container_uri:
    :vartype container_uri: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar preview_params:
    :vartype preview_params: str
    :ivar model_output_path:
    :vartype model_output_path: str
    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
     "AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
     "AzureMySqlDatabase", "Custom", "Hdfs".
    :vartype data_reference_type: str or ~flow.models.DataReferenceType
    :ivar is_file:
    :vartype is_file: bool
    :ivar supported_actions:
    :vartype supported_actions: list[str or ~flow.models.PortAction]
    """

    _attribute_map = {
        'container_uri': {'key': 'containerUri', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'preview_params': {'key': 'previewParams', 'type': 'str'},
        'model_output_path': {'key': 'modelOutputPath', 'type': 'str'},
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'data_reference_type': {'key': 'dataReferenceType', 'type': 'str'},
        'is_file': {'key': 'isFile', 'type': 'bool'},
        # PortAction enum values are carried on the wire as plain strings.
        'supported_actions': {'key': 'supportedActions', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword container_uri:
        :paramtype container_uri: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword preview_params:
        :paramtype preview_params: str
        :keyword model_output_path:
        :paramtype model_output_path: str
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword data_reference_type: Possible values include: "None", "AzureBlob",
         "AzureDataLake", "AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase",
         "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase", "Custom", "Hdfs".
        :paramtype data_reference_type: str or ~flow.models.DataReferenceType
        :keyword is_file:
        :paramtype is_file: bool
        :keyword supported_actions:
        :paramtype supported_actions: list[str or ~flow.models.PortAction]
        """
        super(PortOutputInfo, self).__init__(**kwargs)
        self.container_uri = kwargs.get('container_uri', None)
        self.relative_path = kwargs.get('relative_path', None)
        self.preview_params = kwargs.get('preview_params', None)
        self.model_output_path = kwargs.get('model_output_path', None)
        self.data_store_name = kwargs.get('data_store_name', None)
        self.data_reference_type = kwargs.get('data_reference_type', None)
        self.is_file = kwargs.get('is_file', None)
        self.supported_actions = kwargs.get('supported_actions', None)


class PriorityConfig(msrest.serialization.Model):
    """PriorityConfig.

    :ivar job_priority:
    :vartype job_priority: int
    :ivar is_preemptible:
    :vartype is_preemptible: bool
    :ivar node_count_set:
    :vartype node_count_set: list[int]
    :ivar scale_interval:
    :vartype scale_interval: int
    """

    _attribute_map = {
        'job_priority': {'key': 'jobPriority', 'type': 'int'},
        'is_preemptible': {'key': 'isPreemptible', 'type': 'bool'},
        'node_count_set': {'key': 'nodeCountSet', 'type': '[int]'},
        'scale_interval': {'key': 'scaleInterval', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword job_priority:
        :paramtype job_priority: int
        :keyword is_preemptible:
        :paramtype is_preemptible: bool
        :keyword node_count_set:
        :paramtype node_count_set: list[int]
        :keyword scale_interval:
        :paramtype scale_interval: int
        """
        super(PriorityConfig, self).__init__(**kwargs)
        self.job_priority = kwargs.get('job_priority', None)
        self.is_preemptible = kwargs.get('is_preemptible', None)
        self.node_count_set = kwargs.get('node_count_set', None)
        self.scale_interval = kwargs.get('scale_interval', None)


class PriorityConfiguration(msrest.serialization.Model):
    """PriorityConfiguration.
    :ivar cloud_priority:
    :vartype cloud_priority: int
    :ivar string_type_priority:
    :vartype string_type_priority: str
    """

    _attribute_map = {
        'cloud_priority': {'key': 'cloudPriority', 'type': 'int'},
        'string_type_priority': {'key': 'stringTypePriority', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword cloud_priority:
        :paramtype cloud_priority: int
        :keyword string_type_priority:
        :paramtype string_type_priority: str
        """
        super(PriorityConfiguration, self).__init__(**kwargs)
        self.cloud_priority = kwargs.get('cloud_priority', None)
        self.string_type_priority = kwargs.get('string_type_priority', None)


class PromoteDataSetRequest(msrest.serialization.Model):
    """PromoteDataSetRequest.

    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar module_node_id:
    :vartype module_node_id: str
    :ivar step_run_id:
    :vartype step_run_id: str
    :ivar output_port_name:
    :vartype output_port_name: str
    :ivar model_output_path:
    :vartype model_output_path: str
    :ivar data_type_id:
    :vartype data_type_id: str
    :ivar dataset_type:
    :vartype dataset_type: str
    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar output_relative_path:
    :vartype output_relative_path: str
    :ivar pipeline_run_id:
    :vartype pipeline_run_id: str
    :ivar root_pipeline_run_id:
    :vartype root_pipeline_run_id: str
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar experiment_id:
    :vartype experiment_id: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
        'step_run_id': {'key': 'stepRunId', 'type': 'str'},
        'output_port_name': {'key': 'outputPortName', 'type': 'str'},
        'model_output_path': {'key': 'modelOutputPath', 'type': 'str'},
        'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
        'dataset_type': {'key': 'datasetType', 'type': 'str'},
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'output_relative_path': {'key': 'outputRelativePath', 'type': 'str'},
        'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
        'root_pipeline_run_id': {'key': 'rootPipelineRunId', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword module_node_id:
        :paramtype module_node_id: str
        :keyword step_run_id:
        :paramtype step_run_id: str
        :keyword output_port_name:
        :paramtype output_port_name: str
        :keyword model_output_path:
        :paramtype model_output_path: str
        :keyword data_type_id:
        :paramtype data_type_id: str
        :keyword dataset_type:
        :paramtype dataset_type: str
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword output_relative_path:
        :paramtype output_relative_path: str
        :keyword pipeline_run_id:
        :paramtype pipeline_run_id: str
        :keyword root_pipeline_run_id:
        :paramtype root_pipeline_run_id: str
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword experiment_id:
        :paramtype experiment_id: str
        """
        super(PromoteDataSetRequest, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.module_node_id = kwargs.get('module_node_id', None)
        self.step_run_id = kwargs.get('step_run_id', None)
        self.output_port_name = kwargs.get('output_port_name', None)
        self.model_output_path = kwargs.get('model_output_path', None)
        self.data_type_id = kwargs.get('data_type_id', None)
        self.dataset_type = kwargs.get('dataset_type', None)
        self.data_store_name = kwargs.get('data_store_name', None)
        self.output_relative_path = kwargs.get('output_relative_path', None)
        self.pipeline_run_id = kwargs.get('pipeline_run_id', None)
        self.root_pipeline_run_id = kwargs.get('root_pipeline_run_id', None)
        self.experiment_name = kwargs.get('experiment_name', None)
        self.experiment_id = kwargs.get('experiment_id', None)


class ProviderEntity(msrest.serialization.Model):
    """ProviderEntity.

    :ivar provider:
    :vartype provider: str
    :ivar module:
    :vartype module: str
    :ivar connection_type:
    :vartype connection_type: list[str or ~flow.models.ConnectionType]
    :ivar apis:
    :vartype apis: list[~flow.models.ApiAndParameters]
    """

    _attribute_map = {
        'provider': {'key': 'provider', 'type': 'str'},
        'module': {'key': 'module', 'type': 'str'},
        # NOTE(review): wire key 'connection_type' is snake_case, unlike the camelCase
        # keys used everywhere else in this file. Presumably it mirrors the service
        # contract (this code is generated) — confirm against the swagger before changing.
        'connection_type': {'key': 'connection_type', 'type': '[str]'},
        'apis': {'key': 'apis', 'type': '[ApiAndParameters]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword provider:
        :paramtype provider: str
        :keyword module:
        :paramtype module: str
        :keyword connection_type:
        :paramtype connection_type: list[str or ~flow.models.ConnectionType]
        :keyword apis:
        :paramtype apis: list[~flow.models.ApiAndParameters]
        """
        super(ProviderEntity, self).__init__(**kwargs)
        self.provider = kwargs.get('provider', None)
        self.module = kwargs.get('module', None)
        self.connection_type = kwargs.get('connection_type', None)
        self.apis = kwargs.get('apis', None)


class PublishedPipeline(msrest.serialization.Model):
    """PublishedPipeline.

    :ivar total_run_steps:
    :vartype total_run_steps: int
    :ivar total_runs:
    :vartype total_runs: int
    :ivar parameters: This is a dictionary.
    :vartype parameters: dict[str, str]
    :ivar data_set_definition_value_assignment: This is a dictionary.
    :vartype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
    :ivar rest_endpoint:
    :vartype rest_endpoint: str
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar graph_id:
    :vartype graph_id: str
    :ivar published_date:
    :vartype published_date: ~datetime.datetime
    :ivar last_run_time:
    :vartype last_run_time: ~datetime.datetime
    :ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
     "Canceled", "Queued", "CancelRequested".
    :vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
    :ivar published_by:
    :vartype published_by: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar version:
    :vartype version: str
    :ivar is_default:
    :vartype is_default: bool
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Wire keys are camelCase; datetime attributes serialize as ISO-8601 strings.
    _attribute_map = {
        'total_run_steps': {'key': 'totalRunSteps', 'type': 'int'},
        'total_runs': {'key': 'totalRuns', 'type': 'int'},
        'parameters': {'key': 'parameters', 'type': '{str}'},
        'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': '{DataSetDefinitionValue}'},
        'rest_endpoint': {'key': 'restEndpoint', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'graph_id': {'key': 'graphId', 'type': 'str'},
        'published_date': {'key': 'publishedDate', 'type': 'iso-8601'},
        'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
        'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
        'published_by': {'key': 'publishedBy', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'version': {'key': 'version', 'type': 'str'},
        'is_default': {'key': 'isDefault', 'type': 'bool'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword total_run_steps:
        :paramtype total_run_steps: int
        :keyword total_runs:
        :paramtype total_runs: int
        :keyword parameters: This is a dictionary.
        :paramtype parameters: dict[str, str]
        :keyword data_set_definition_value_assignment: This is a dictionary.
        :paramtype data_set_definition_value_assignment: dict[str,
         ~flow.models.DataSetDefinitionValue]
        :keyword rest_endpoint:
        :paramtype rest_endpoint: str
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword graph_id:
        :paramtype graph_id: str
        :keyword published_date:
        :paramtype published_date: ~datetime.datetime
        :keyword last_run_time:
        :paramtype last_run_time: ~datetime.datetime
        :keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
         "Finished", "Canceled", "Queued", "CancelRequested".
        :paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
        :keyword published_by:
        :paramtype published_by: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword version:
        :paramtype version: str
        :keyword is_default:
        :paramtype is_default: bool
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PublishedPipeline, self).__init__(**kwargs)
        self.total_run_steps = kwargs.get('total_run_steps', None)
        self.total_runs = kwargs.get('total_runs', None)
        self.parameters = kwargs.get('parameters', None)
        self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
        self.rest_endpoint = kwargs.get('rest_endpoint', None)
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.graph_id = kwargs.get('graph_id', None)
        self.published_date = kwargs.get('published_date', None)
        self.last_run_time = kwargs.get('last_run_time', None)
        self.last_run_status = kwargs.get('last_run_status', None)
        self.published_by = kwargs.get('published_by', None)
        self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)
        self.version = kwargs.get('version', None)
        self.is_default = kwargs.get('is_default', None)
        self.entity_status = kwargs.get('entity_status', None)
        self.id = kwargs.get('id', None)
        self.etag = kwargs.get('etag', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)


class PublishedPipelineSummary(msrest.serialization.Model):
    """PublishedPipelineSummary.

    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar graph_id:
    :vartype graph_id: str
    :ivar published_date:
    :vartype published_date: ~datetime.datetime
    :ivar last_run_time:
    :vartype last_run_time: ~datetime.datetime
    :ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
     "Canceled", "Queued", "CancelRequested".
    :vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
    :ivar published_by:
    :vartype published_by: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar version:
    :vartype version: str
    :ivar is_default:
    :vartype is_default: bool
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Same audit/status fields as PublishedPipeline, without the graph/parameter payloads.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'graph_id': {'key': 'graphId', 'type': 'str'},
        'published_date': {'key': 'publishedDate', 'type': 'iso-8601'},
        'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
        'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
        'published_by': {'key': 'publishedBy', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'version': {'key': 'version', 'type': 'str'},
        'is_default': {'key': 'isDefault', 'type': 'bool'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword graph_id:
        :paramtype graph_id: str
        :keyword published_date:
        :paramtype published_date: ~datetime.datetime
        :keyword last_run_time:
        :paramtype last_run_time: ~datetime.datetime
        :keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
         "Finished", "Canceled", "Queued", "CancelRequested".
        :paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
        :keyword published_by:
        :paramtype published_by: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword version:
        :paramtype version: str
        :keyword is_default:
        :paramtype is_default: bool
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PublishedPipelineSummary, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.graph_id = kwargs.get('graph_id', None)
        self.published_date = kwargs.get('published_date', None)
        self.last_run_time = kwargs.get('last_run_time', None)
        self.last_run_status = kwargs.get('last_run_status', None)
        self.published_by = kwargs.get('published_by', None)
        self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)
        self.version = kwargs.get('version', None)
        self.is_default = kwargs.get('is_default', None)
        self.entity_status = kwargs.get('entity_status', None)
        self.id = kwargs.get('id', None)
        self.etag = kwargs.get('etag', None)
        self.created_date = kwargs.get('created_date', None)
        self.last_modified_date = kwargs.get('last_modified_date', None)


class PythonInterfaceMapping(msrest.serialization.Model):
    """PythonInterfaceMapping.

    :ivar name:
    :vartype name: str
    :ivar name_in_yaml:
    :vartype name_in_yaml: str
    :ivar argument_name:
    :vartype argument_name: str
    :ivar command_line_option:
    :vartype command_line_option: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'name_in_yaml': {'key': 'nameInYaml', 'type': 'str'},
        'argument_name': {'key': 'argumentName', 'type': 'str'},
        'command_line_option': {'key': 'commandLineOption', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword name_in_yaml:
        :paramtype name_in_yaml: str
        :keyword argument_name:
        :paramtype argument_name: str
        :keyword command_line_option:
        :paramtype command_line_option: str
        """
        super(PythonInterfaceMapping, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.name_in_yaml = kwargs.get('name_in_yaml', None)
        self.argument_name = kwargs.get('argument_name', None)
        self.command_line_option = kwargs.get('command_line_option', None)


class PythonPyPiOrRCranLibraryDto(msrest.serialization.Model):
    """PythonPyPiOrRCranLibraryDto.

    :ivar package:
    :vartype package: str
    :ivar repo:
    :vartype repo: str
    """

    _attribute_map = {
        'package': {'key': 'package', 'type': 'str'},
        'repo': {'key': 'repo', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword package:
        :paramtype package: str
        :keyword repo:
        :paramtype repo: str
        """
        super(PythonPyPiOrRCranLibraryDto, self).__init__(**kwargs)
        self.package = kwargs.get('package', None)
        self.repo = kwargs.get('repo', None)


class PythonSection(msrest.serialization.Model):
    """PythonSection.

    :ivar interpreter_path:
    :vartype interpreter_path: str
    :ivar user_managed_dependencies:
    :vartype user_managed_dependencies: bool
    :ivar conda_dependencies: Anything.
    :vartype conda_dependencies: any
    :ivar base_conda_environment:
    :vartype base_conda_environment: str
    """

    _attribute_map = {
        'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
        'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
        # 'object' = arbitrary JSON ("Anything" per the docstring), passed through as-is.
        'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
        'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword interpreter_path:
        :paramtype interpreter_path: str
        :keyword user_managed_dependencies:
        :paramtype user_managed_dependencies: bool
        :keyword conda_dependencies: Anything.
        :paramtype conda_dependencies: any
        :keyword base_conda_environment:
        :paramtype base_conda_environment: str
        """
        super(PythonSection, self).__init__(**kwargs)
        self.interpreter_path = kwargs.get('interpreter_path', None)
        self.user_managed_dependencies = kwargs.get('user_managed_dependencies', None)
        self.conda_dependencies = kwargs.get('conda_dependencies', None)
        self.base_conda_environment = kwargs.get('base_conda_environment', None)


class PyTorchConfiguration(msrest.serialization.Model):
    """PyTorchConfiguration.

    :ivar communication_backend:
    :vartype communication_backend: str
    :ivar process_count:
    :vartype process_count: int
    """

    _attribute_map = {
        'communication_backend': {'key': 'communicationBackend', 'type': 'str'},
        'process_count': {'key': 'processCount', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword communication_backend:
        :paramtype communication_backend: str
        :keyword process_count:
        :paramtype process_count: int
        """
        super(PyTorchConfiguration, self).__init__(**kwargs)
        self.communication_backend = kwargs.get('communication_backend', None)
        self.process_count = kwargs.get('process_count', None)


class QueueingInfo(msrest.serialization.Model):
    """QueueingInfo.

    :ivar code:
    :vartype code: str
    :ivar message:
    :vartype message: str
    :ivar last_refresh_timestamp:
    :vartype last_refresh_timestamp: ~datetime.datetime
    """

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'last_refresh_timestamp': {'key': 'lastRefreshTimestamp', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword code:
        :paramtype code: str
        :keyword message:
        :paramtype message: str
        :keyword last_refresh_timestamp:
        :paramtype last_refresh_timestamp: ~datetime.datetime
        """
        super(QueueingInfo, self).__init__(**kwargs)
        self.code = kwargs.get('code', None)
        self.message = kwargs.get('message', None)
        self.last_refresh_timestamp = kwargs.get('last_refresh_timestamp', None)


class RawComponentDto(msrest.serialization.Model):
    """RawComponentDto.

    :ivar component_schema:
    :vartype component_schema: str
    :ivar is_anonymous:
    :vartype is_anonymous: bool
    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    :ivar type: Possible values include: "Unknown", "CommandComponent", "Command".
    :vartype type: str or ~flow.models.ComponentType
    :ivar component_type_version:
    :vartype component_type_version: str
    :ivar display_name:
    :vartype display_name: str
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar is_deterministic:
    :vartype is_deterministic: bool
    :ivar successful_return_code:
    :vartype successful_return_code: str
    :ivar inputs: This is a dictionary.
    :vartype inputs: dict[str, ~flow.models.ComponentInput]
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, ~flow.models.ComponentOutput]
    :ivar command:
    :vartype command: str
    :ivar environment_name:
    :vartype environment_name: str
    :ivar environment_version:
    :vartype environment_version: str
    :ivar snapshot_id:
    :vartype snapshot_id: str
    :ivar created_by:
    :vartype created_by: ~flow.models.SchemaContractsCreatedBy
    :ivar last_modified_by:
    :vartype last_modified_by: ~flow.models.SchemaContractsCreatedBy
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    :ivar component_internal_id:
    :vartype component_internal_id: str
    """

    _attribute_map = {
        'component_schema': {'key': 'componentSchema', 'type': 'str'},
        'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'component_type_version': {'key': 'componentTypeVersion', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
        'successful_return_code': {'key': 'successfulReturnCode', 'type': 'str'},
        'inputs': {'key': 'inputs', 'type': '{ComponentInput}'},
        'outputs': {'key': 'outputs', 'type': '{ComponentOutput}'},
        'command': {'key': 'command', 'type': 'str'},
        'environment_name': {'key': 'environmentName', 'type': 'str'},
        'environment_version': {'key': 'environmentVersion', 'type': 'str'},
        'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
        'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
        'last_modified_by': {'key': 'lastModifiedBy', 'type': 'SchemaContractsCreatedBy'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
        'component_internal_id': {'key': 'componentInternalId',
'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword component_schema: :paramtype component_schema: str :keyword is_anonymous: :paramtype is_anonymous: bool :keyword name: :paramtype name: str :keyword version: :paramtype version: str :keyword type: Possible values include: "Unknown", "CommandComponent", "Command". :paramtype type: str or ~flow.models.ComponentType :keyword component_type_version: :paramtype component_type_version: str :keyword display_name: :paramtype display_name: str :keyword description: :paramtype description: str :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] :keyword is_deterministic: :paramtype is_deterministic: bool :keyword successful_return_code: :paramtype successful_return_code: str :keyword inputs: This is a dictionary. :paramtype inputs: dict[str, ~flow.models.ComponentInput] :keyword outputs: This is a dictionary. :paramtype outputs: dict[str, ~flow.models.ComponentOutput] :keyword command: :paramtype command: str :keyword environment_name: :paramtype environment_name: str :keyword environment_version: :paramtype environment_version: str :keyword snapshot_id: :paramtype snapshot_id: str :keyword created_by: :paramtype created_by: ~flow.models.SchemaContractsCreatedBy :keyword last_modified_by: :paramtype last_modified_by: ~flow.models.SchemaContractsCreatedBy :keyword created_date: :paramtype created_date: ~datetime.datetime :keyword last_modified_date: :paramtype last_modified_date: ~datetime.datetime :keyword component_internal_id: :paramtype component_internal_id: str """ super(RawComponentDto, self).__init__(**kwargs) self.component_schema = kwargs.get('component_schema', None) self.is_anonymous = kwargs.get('is_anonymous', None) self.name = kwargs.get('name', None) self.version = kwargs.get('version', None) self.type = kwargs.get('type', None) self.component_type_version = 
kwargs.get('component_type_version', None) self.display_name = kwargs.get('display_name', None) self.description = kwargs.get('description', None) self.tags = kwargs.get('tags', None) self.properties = kwargs.get('properties', None) self.is_deterministic = kwargs.get('is_deterministic', None) self.successful_return_code = kwargs.get('successful_return_code', None) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.command = kwargs.get('command', None) self.environment_name = kwargs.get('environment_name', None) self.environment_version = kwargs.get('environment_version', None) self.snapshot_id = kwargs.get('snapshot_id', None) self.created_by = kwargs.get('created_by', None) self.last_modified_by = kwargs.get('last_modified_by', None) self.created_date = kwargs.get('created_date', None) self.last_modified_date = kwargs.get('last_modified_date', None) self.component_internal_id = kwargs.get('component_internal_id', None) class RayConfiguration(msrest.serialization.Model): """RayConfiguration. 
    :ivar port:
    :vartype port: int
    :ivar address:
    :vartype address: str
    :ivar include_dashboard:
    :vartype include_dashboard: bool
    :ivar dashboard_port:
    :vartype dashboard_port: int
    :ivar head_node_additional_args:
    :vartype head_node_additional_args: str
    :ivar worker_node_additional_args:
    :vartype worker_node_additional_args: str
    """

    # Python attribute -> wire name / type map consumed by the msrest serializer.
    _attribute_map = {
        'port': {'key': 'port', 'type': 'int'},
        'address': {'key': 'address', 'type': 'str'},
        'include_dashboard': {'key': 'includeDashboard', 'type': 'bool'},
        'dashboard_port': {'key': 'dashboardPort', 'type': 'int'},
        'head_node_additional_args': {'key': 'headNodeAdditionalArgs', 'type': 'str'},
        'worker_node_additional_args': {'key': 'workerNodeAdditionalArgs', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword port:
        :paramtype port: int
        :keyword address:
        :paramtype address: str
        :keyword include_dashboard:
        :paramtype include_dashboard: bool
        :keyword dashboard_port:
        :paramtype dashboard_port: int
        :keyword head_node_additional_args:
        :paramtype head_node_additional_args: str
        :keyword worker_node_additional_args:
        :paramtype worker_node_additional_args: str
        """
        super(RayConfiguration, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.port = kwargs.get('port', None)
        self.address = kwargs.get('address', None)
        self.include_dashboard = kwargs.get('include_dashboard', None)
        self.dashboard_port = kwargs.get('dashboard_port', None)
        self.head_node_additional_args = kwargs.get('head_node_additional_args', None)
        self.worker_node_additional_args = kwargs.get('worker_node_additional_args', None)


# Autorest-generated msrest model; hand edits are lost on regeneration.
class RCranPackage(msrest.serialization.Model):
    """RCranPackage.

    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    :ivar repository:
    :vartype repository: str
    """

    # Python attribute -> wire name / type map consumed by the msrest serializer.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'repository': {'key': 'repository', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        :keyword repository:
        :paramtype repository: str
        """
        super(RCranPackage, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.name = kwargs.get('name', None)
        self.version = kwargs.get('version', None)
        self.repository = kwargs.get('repository', None)


# Autorest-generated msrest model; hand edits are lost on regeneration.
class RealTimeEndpoint(msrest.serialization.Model):
    """RealTimeEndpoint.

    :ivar created_by:
    :vartype created_by: str
    :ivar kv_tags: Dictionary of :code:`<string>`.
    :vartype kv_tags: dict[str, str]
    :ivar state: Possible values include: "Transitioning", "Healthy", "Unhealthy", "Failed",
     "Unschedulable".
    :vartype state: str or ~flow.models.WebServiceState
    :ivar error:
    :vartype error: ~flow.models.ModelManagementErrorResponse
    :ivar compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT", "AKSENDPOINT",
     "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN".
    :vartype compute_type: str or ~flow.models.ComputeEnvironmentType
    :ivar image_id:
    :vartype image_id: str
    :ivar cpu:
    :vartype cpu: float
    :ivar memory_in_gb:
    :vartype memory_in_gb: float
    :ivar max_concurrent_requests_per_container:
    :vartype max_concurrent_requests_per_container: int
    :ivar num_replicas:
    :vartype num_replicas: int
    :ivar event_hub_enabled:
    :vartype event_hub_enabled: bool
    :ivar storage_enabled:
    :vartype storage_enabled: bool
    :ivar app_insights_enabled:
    :vartype app_insights_enabled: bool
    :ivar auto_scale_enabled:
    :vartype auto_scale_enabled: bool
    :ivar min_replicas:
    :vartype min_replicas: int
    :ivar max_replicas:
    :vartype max_replicas: int
    :ivar target_utilization:
    :vartype target_utilization: int
    :ivar refresh_period_in_seconds:
    :vartype refresh_period_in_seconds: int
    :ivar scoring_uri:
    :vartype scoring_uri: str
    :ivar deployment_status:
    :vartype deployment_status: ~flow.models.AKSReplicaStatus
    :ivar scoring_timeout_ms:
    :vartype scoring_timeout_ms: int
    :ivar auth_enabled:
    :vartype auth_enabled: bool
    :ivar aad_auth_enabled:
    :vartype aad_auth_enabled: bool
    :ivar region:
    :vartype region: str
    :ivar primary_key:
    :vartype primary_key: str
    :ivar secondary_key:
    :vartype secondary_key: str
    :ivar swagger_uri:
    :vartype swagger_uri: str
    :ivar linked_pipeline_draft_id:
    :vartype linked_pipeline_draft_id: str
    :ivar linked_pipeline_run_id:
    :vartype linked_pipeline_run_id: str
    :ivar warning:
    :vartype warning: str
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar id:
    :vartype id: str
    :ivar created_time:
    :vartype created_time: ~datetime.datetime
    :ivar updated_time:
    :vartype updated_time: ~datetime.datetime
    :ivar compute_name:
    :vartype compute_name: str
    :ivar updated_by:
    :vartype updated_by: str
    """

    # Python attribute -> wire name / type map consumed by the msrest serializer.
    _attribute_map = {
        'created_by': {'key': 'createdBy', 'type': 'str'},
        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
        'state': {'key': 'state', 'type': 'str'},
        'error': {'key': 'error', 'type': 'ModelManagementErrorResponse'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'image_id': {'key': 'imageId', 'type': 'str'},
        'cpu': {'key': 'cpu', 'type': 'float'},
        'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
        'max_concurrent_requests_per_container': {'key': 'maxConcurrentRequestsPerContainer', 'type': 'int'},
        'num_replicas': {'key': 'numReplicas', 'type': 'int'},
        'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
        'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
        'auto_scale_enabled': {'key': 'autoScaleEnabled', 'type': 'bool'},
        'min_replicas': {'key': 'minReplicas', 'type': 'int'},
        'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
        'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
        'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
        'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
        'deployment_status': {'key': 'deploymentStatus', 'type': 'AKSReplicaStatus'},
        'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
        'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
        'aad_auth_enabled': {'key': 'aadAuthEnabled', 'type': 'bool'},
        'region': {'key': 'region', 'type': 'str'},
        'primary_key': {'key': 'primaryKey', 'type': 'str'},
        'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
        'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
        'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
        'linked_pipeline_run_id': {'key': 'linkedPipelineRunId', 'type': 'str'},
        'warning': {'key': 'warning', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'updated_time': {'key': 'updatedTime', 'type': 'iso-8601'},
        'compute_name': {'key': 'computeName', 'type': 'str'},
        'updated_by': {'key': 'updatedBy', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword created_by:
        :paramtype created_by: str
        :keyword kv_tags: Dictionary of :code:`<string>`.
        :paramtype kv_tags: dict[str, str]
        :keyword state: Possible values include: "Transitioning", "Healthy", "Unhealthy", "Failed",
         "Unschedulable".
        :paramtype state: str or ~flow.models.WebServiceState
        :keyword error:
        :paramtype error: ~flow.models.ModelManagementErrorResponse
        :keyword compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT",
         "AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE",
         "UNKNOWN".
        :paramtype compute_type: str or ~flow.models.ComputeEnvironmentType
        :keyword image_id:
        :paramtype image_id: str
        :keyword cpu:
        :paramtype cpu: float
        :keyword memory_in_gb:
        :paramtype memory_in_gb: float
        :keyword max_concurrent_requests_per_container:
        :paramtype max_concurrent_requests_per_container: int
        :keyword num_replicas:
        :paramtype num_replicas: int
        :keyword event_hub_enabled:
        :paramtype event_hub_enabled: bool
        :keyword storage_enabled:
        :paramtype storage_enabled: bool
        :keyword app_insights_enabled:
        :paramtype app_insights_enabled: bool
        :keyword auto_scale_enabled:
        :paramtype auto_scale_enabled: bool
        :keyword min_replicas:
        :paramtype min_replicas: int
        :keyword max_replicas:
        :paramtype max_replicas: int
        :keyword target_utilization:
        :paramtype target_utilization: int
        :keyword refresh_period_in_seconds:
        :paramtype refresh_period_in_seconds: int
        :keyword scoring_uri:
        :paramtype scoring_uri: str
        :keyword deployment_status:
        :paramtype deployment_status: ~flow.models.AKSReplicaStatus
        :keyword scoring_timeout_ms:
        :paramtype scoring_timeout_ms: int
        :keyword auth_enabled:
        :paramtype auth_enabled: bool
        :keyword aad_auth_enabled:
        :paramtype aad_auth_enabled: bool
        :keyword region:
        :paramtype region: str
        :keyword primary_key:
        :paramtype primary_key: str
        :keyword secondary_key:
        :paramtype secondary_key: str
        :keyword swagger_uri:
        :paramtype swagger_uri: str
        :keyword linked_pipeline_draft_id:
        :paramtype linked_pipeline_draft_id: str
        :keyword linked_pipeline_run_id:
        :paramtype linked_pipeline_run_id: str
        :keyword warning:
        :paramtype warning: str
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword id:
        :paramtype id: str
        :keyword created_time:
        :paramtype created_time: ~datetime.datetime
        :keyword updated_time:
        :paramtype updated_time: ~datetime.datetime
        :keyword compute_name:
        :paramtype compute_name: str
        :keyword updated_by:
        :paramtype updated_by: str
        """
        super(RealTimeEndpoint, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.created_by = kwargs.get('created_by', None)
        self.kv_tags = kwargs.get('kv_tags', None)
        self.state = kwargs.get('state', None)
        self.error = kwargs.get('error', None)
        self.compute_type = kwargs.get('compute_type', None)
        self.image_id = kwargs.get('image_id', None)
        self.cpu = kwargs.get('cpu', None)
        self.memory_in_gb = kwargs.get('memory_in_gb', None)
        self.max_concurrent_requests_per_container = kwargs.get('max_concurrent_requests_per_container', None)
        self.num_replicas = kwargs.get('num_replicas', None)
        self.event_hub_enabled = kwargs.get('event_hub_enabled', None)
        self.storage_enabled = kwargs.get('storage_enabled', None)
        self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
        self.auto_scale_enabled = kwargs.get('auto_scale_enabled', None)
        self.min_replicas = kwargs.get('min_replicas', None)
        self.max_replicas = kwargs.get('max_replicas', None)
        self.target_utilization = kwargs.get('target_utilization', None)
        self.refresh_period_in_seconds = kwargs.get('refresh_period_in_seconds', None)
        self.scoring_uri = kwargs.get('scoring_uri', None)
        self.deployment_status = kwargs.get('deployment_status', None)
        self.scoring_timeout_ms = kwargs.get('scoring_timeout_ms', None)
        self.auth_enabled = kwargs.get('auth_enabled', None)
        self.aad_auth_enabled = kwargs.get('aad_auth_enabled', None)
        self.region = kwargs.get('region', None)
        self.primary_key = kwargs.get('primary_key', None)
        self.secondary_key = kwargs.get('secondary_key', None)
        self.swagger_uri = kwargs.get('swagger_uri', None)
        self.linked_pipeline_draft_id = kwargs.get('linked_pipeline_draft_id', None)
        self.linked_pipeline_run_id = kwargs.get('linked_pipeline_run_id', None)
        self.warning = kwargs.get('warning', None)
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.id = kwargs.get('id', None)
        self.created_time = kwargs.get('created_time', None)
        self.updated_time = kwargs.get('updated_time', None)
        self.compute_name = kwargs.get('compute_name', None)
        self.updated_by = kwargs.get('updated_by', None)


# Autorest-generated msrest model; hand edits are lost on regeneration.
class RealTimeEndpointInfo(msrest.serialization.Model):
    """RealTimeEndpointInfo.

    :ivar web_service_inputs:
    :vartype web_service_inputs: list[~flow.models.WebServicePort]
    :ivar web_service_outputs:
    :vartype web_service_outputs: list[~flow.models.WebServicePort]
    :ivar deployments_info:
    :vartype deployments_info: list[~flow.models.DeploymentInfo]
    """

    # Python attribute -> wire name / type map consumed by the msrest serializer.
    _attribute_map = {
        'web_service_inputs': {'key': 'webServiceInputs', 'type': '[WebServicePort]'},
        'web_service_outputs': {'key': 'webServiceOutputs', 'type': '[WebServicePort]'},
        'deployments_info': {'key': 'deploymentsInfo', 'type': '[DeploymentInfo]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword web_service_inputs:
        :paramtype web_service_inputs: list[~flow.models.WebServicePort]
        :keyword web_service_outputs:
        :paramtype web_service_outputs: list[~flow.models.WebServicePort]
        :keyword deployments_info:
        :paramtype deployments_info: list[~flow.models.DeploymentInfo]
        """
        super(RealTimeEndpointInfo, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.web_service_inputs = kwargs.get('web_service_inputs', None)
        self.web_service_outputs = kwargs.get('web_service_outputs', None)
        self.deployments_info = kwargs.get('deployments_info', None)


# Autorest-generated msrest model; hand edits are lost on regeneration.
class RealTimeEndpointStatus(msrest.serialization.Model):
    """RealTimeEndpointStatus.

    :ivar last_operation: Possible values include: "Create", "Update", "Delete".
    :vartype last_operation: str or ~flow.models.RealTimeEndpointOpCode
    :ivar last_operation_status: Possible values include: "Ongoing", "Succeeded", "Failed",
     "SucceededWithWarning".
    :vartype last_operation_status: str or ~flow.models.RealTimeEndpointOpStatusCode
    :ivar internal_step: Possible values include: "AboutToDeploy", "WaitAksComputeReady",
     "RegisterModels", "CreateServiceFromModels", "UpdateServiceFromModels", "WaitServiceCreating",
     "FetchServiceRelatedInfo", "TestWithSampleData", "AboutToDelete", "DeleteDeployment",
     "DeleteAsset", "DeleteImage", "DeleteModel", "DeleteServiceRecord".
    :vartype internal_step: str or ~flow.models.RealTimeEndpointInternalStepCode
    :ivar status_detail:
    :vartype status_detail: str
    :ivar deployment_state:
    :vartype deployment_state: str
    :ivar service_id:
    :vartype service_id: str
    :ivar linked_pipeline_draft_id:
    :vartype linked_pipeline_draft_id: str
    """

    # Python attribute -> wire name / type map consumed by the msrest serializer.
    _attribute_map = {
        'last_operation': {'key': 'lastOperation', 'type': 'str'},
        'last_operation_status': {'key': 'lastOperationStatus', 'type': 'str'},
        'internal_step': {'key': 'internalStep', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
        'deployment_state': {'key': 'deploymentState', 'type': 'str'},
        'service_id': {'key': 'serviceId', 'type': 'str'},
        'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword last_operation: Possible values include: "Create", "Update", "Delete".
        :paramtype last_operation: str or ~flow.models.RealTimeEndpointOpCode
        :keyword last_operation_status: Possible values include: "Ongoing", "Succeeded", "Failed",
         "SucceededWithWarning".
        :paramtype last_operation_status: str or ~flow.models.RealTimeEndpointOpStatusCode
        :keyword internal_step: Possible values include: "AboutToDeploy", "WaitAksComputeReady",
         "RegisterModels", "CreateServiceFromModels", "UpdateServiceFromModels",
         "WaitServiceCreating", "FetchServiceRelatedInfo", "TestWithSampleData", "AboutToDelete",
         "DeleteDeployment", "DeleteAsset", "DeleteImage", "DeleteModel", "DeleteServiceRecord".
        :paramtype internal_step: str or ~flow.models.RealTimeEndpointInternalStepCode
        :keyword status_detail:
        :paramtype status_detail: str
        :keyword deployment_state:
        :paramtype deployment_state: str
        :keyword service_id:
        :paramtype service_id: str
        :keyword linked_pipeline_draft_id:
        :paramtype linked_pipeline_draft_id: str
        """
        super(RealTimeEndpointStatus, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.last_operation = kwargs.get('last_operation', None)
        self.last_operation_status = kwargs.get('last_operation_status', None)
        self.internal_step = kwargs.get('internal_step', None)
        self.status_detail = kwargs.get('status_detail', None)
        self.deployment_state = kwargs.get('deployment_state', None)
        self.service_id = kwargs.get('service_id', None)
        self.linked_pipeline_draft_id = kwargs.get('linked_pipeline_draft_id', None)


# Autorest-generated msrest model; hand edits are lost on regeneration.
class RealTimeEndpointSummary(msrest.serialization.Model):
    """RealTimeEndpointSummary.

    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar id:
    :vartype id: str
    :ivar created_time:
    :vartype created_time: ~datetime.datetime
    :ivar updated_time:
    :vartype updated_time: ~datetime.datetime
    :ivar compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT", "AKSENDPOINT",
     "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN".
    :vartype compute_type: str or ~flow.models.ComputeEnvironmentType
    :ivar compute_name:
    :vartype compute_name: str
    :ivar updated_by:
    :vartype updated_by: str
    """

    # Python attribute -> wire name / type map consumed by the msrest serializer.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'updated_time': {'key': 'updatedTime', 'type': 'iso-8601'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_name': {'key': 'computeName', 'type': 'str'},
        'updated_by': {'key': 'updatedBy', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword id:
        :paramtype id: str
        :keyword created_time:
        :paramtype created_time: ~datetime.datetime
        :keyword updated_time:
        :paramtype updated_time: ~datetime.datetime
        :keyword compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT",
         "AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE",
         "UNKNOWN".
        :paramtype compute_type: str or ~flow.models.ComputeEnvironmentType
        :keyword compute_name:
        :paramtype compute_name: str
        :keyword updated_by:
        :paramtype updated_by: str
        """
        super(RealTimeEndpointSummary, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.id = kwargs.get('id', None)
        self.created_time = kwargs.get('created_time', None)
        self.updated_time = kwargs.get('updated_time', None)
        self.compute_type = kwargs.get('compute_type', None)
        self.compute_name = kwargs.get('compute_name', None)
        self.updated_by = kwargs.get('updated_by', None)


# Autorest-generated msrest model; hand edits are lost on regeneration.
class RealTimeEndpointTestRequest(msrest.serialization.Model):
    """RealTimeEndpointTestRequest.
    :ivar end_point:
    :vartype end_point: str
    :ivar auth_key:
    :vartype auth_key: str
    :ivar payload:
    :vartype payload: str
    """

    # Python attribute -> wire name / type map consumed by the msrest serializer.
    _attribute_map = {
        'end_point': {'key': 'endPoint', 'type': 'str'},
        'auth_key': {'key': 'authKey', 'type': 'str'},
        'payload': {'key': 'payload', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword end_point:
        :paramtype end_point: str
        :keyword auth_key:
        :paramtype auth_key: str
        :keyword payload:
        :paramtype payload: str
        """
        super(RealTimeEndpointTestRequest, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.end_point = kwargs.get('end_point', None)
        self.auth_key = kwargs.get('auth_key', None)
        self.payload = kwargs.get('payload', None)


# Autorest-generated msrest model; hand edits are lost on regeneration.
class Recurrence(msrest.serialization.Model):
    """Recurrence.

    :ivar frequency: Possible values include: "Month", "Week", "Day", "Hour", "Minute".
    :vartype frequency: str or ~flow.models.Frequency
    :ivar interval:
    :vartype interval: int
    :ivar schedule:
    :vartype schedule: ~flow.models.RecurrenceSchedule
    :ivar end_time:
    :vartype end_time: str
    :ivar start_time:
    :vartype start_time: str
    :ivar time_zone:
    :vartype time_zone: str
    """

    # Python attribute -> wire name / type map consumed by the msrest serializer.
    _attribute_map = {
        'frequency': {'key': 'frequency', 'type': 'str'},
        'interval': {'key': 'interval', 'type': 'int'},
        'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'},
        'end_time': {'key': 'endTime', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'str'},
        'time_zone': {'key': 'timeZone', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword frequency: Possible values include: "Month", "Week", "Day", "Hour", "Minute".
        :paramtype frequency: str or ~flow.models.Frequency
        :keyword interval:
        :paramtype interval: int
        :keyword schedule:
        :paramtype schedule: ~flow.models.RecurrenceSchedule
        :keyword end_time:
        :paramtype end_time: str
        :keyword start_time:
        :paramtype start_time: str
        :keyword time_zone:
        :paramtype time_zone: str
        """
        super(Recurrence, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.frequency = kwargs.get('frequency', None)
        self.interval = kwargs.get('interval', None)
        self.schedule = kwargs.get('schedule', None)
        self.end_time = kwargs.get('end_time', None)
        self.start_time = kwargs.get('start_time', None)
        self.time_zone = kwargs.get('time_zone', None)


# Autorest-generated msrest model; hand edits are lost on regeneration.
class RecurrencePattern(msrest.serialization.Model):
    """RecurrencePattern.

    :ivar hours:
    :vartype hours: list[int]
    :ivar minutes:
    :vartype minutes: list[int]
    :ivar weekdays:
    :vartype weekdays: list[str or ~flow.models.Weekday]
    """

    # Python attribute -> wire name / type map consumed by the msrest serializer.
    _attribute_map = {
        'hours': {'key': 'hours', 'type': '[int]'},
        'minutes': {'key': 'minutes', 'type': '[int]'},
        'weekdays': {'key': 'weekdays', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword hours:
        :paramtype hours: list[int]
        :keyword minutes:
        :paramtype minutes: list[int]
        :keyword weekdays:
        :paramtype weekdays: list[str or ~flow.models.Weekday]
        """
        super(RecurrencePattern, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.hours = kwargs.get('hours', None)
        self.minutes = kwargs.get('minutes', None)
        self.weekdays = kwargs.get('weekdays', None)


# Autorest-generated msrest model; hand edits are lost on regeneration.
class RecurrenceSchedule(msrest.serialization.Model):
    """RecurrenceSchedule.

    :ivar hours:
    :vartype hours: list[int]
    :ivar minutes:
    :vartype minutes: list[int]
    :ivar week_days:
    :vartype week_days: list[str or ~flow.models.WeekDays]
    :ivar month_days:
    :vartype month_days: list[int]
    """

    # Python attribute -> wire name / type map consumed by the msrest serializer.
    _attribute_map = {
        'hours': {'key': 'hours', 'type': '[int]'},
        'minutes': {'key': 'minutes', 'type': '[int]'},
        'week_days': {'key': 'weekDays', 'type': '[str]'},
        'month_days': {'key': 'monthDays', 'type': '[int]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword hours:
        :paramtype hours: list[int]
        :keyword minutes:
        :paramtype minutes: list[int]
        :keyword week_days:
        :paramtype week_days: list[str or ~flow.models.WeekDays]
        :keyword month_days:
        :paramtype month_days: list[int]
        """
        super(RecurrenceSchedule, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.hours = kwargs.get('hours', None)
        self.minutes = kwargs.get('minutes', None)
        self.week_days = kwargs.get('week_days', None)
        self.month_days = kwargs.get('month_days', None)


# Autorest-generated msrest model; hand edits are lost on regeneration.
class RegenerateServiceKeysRequest(msrest.serialization.Model):
    """RegenerateServiceKeysRequest.

    :ivar key_type: Possible values include: "Primary", "Secondary".
    :vartype key_type: str or ~flow.models.KeyType
    :ivar key_value:
    :vartype key_value: str
    """

    # Python attribute -> wire name / type map consumed by the msrest serializer.
    _attribute_map = {
        'key_type': {'key': 'keyType', 'type': 'str'},
        'key_value': {'key': 'keyValue', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword key_type: Possible values include: "Primary", "Secondary".
        :paramtype key_type: str or ~flow.models.KeyType
        :keyword key_value:
        :paramtype key_value: str
        """
        super(RegenerateServiceKeysRequest, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.key_type = kwargs.get('key_type', None)
        self.key_value = kwargs.get('key_value', None)


# Autorest-generated msrest model; hand edits are lost on regeneration.
class RegisterComponentMetaInfo(msrest.serialization.Model):
    """RegisterComponentMetaInfo.

    :ivar aml_module_name:
    :vartype aml_module_name: str
    :ivar name_only_display_info:
    :vartype name_only_display_info: str
    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    :ivar module_version_id:
    :vartype module_version_id: str
    :ivar snapshot_id:
    :vartype snapshot_id: str
    :ivar component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
     "AnonymousAmlModuleVersion", "ModuleEntityOnly".
    :vartype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
    :ivar module_entity_from_yaml:
    :vartype module_entity_from_yaml: ~flow.models.ModuleEntity
    :ivar set_as_default_version:
    :vartype set_as_default_version: bool
    :ivar data_types_from_yaml:
    :vartype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
    :ivar data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
     "RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
    :vartype data_type_mechanism: str or ~flow.models.DataTypeMechanism
    :ivar identifier_hash:
    :vartype identifier_hash: str
    :ivar identifier_hashes:
    :vartype identifier_hashes: ~flow.models.RegisterComponentMetaInfoIdentifierHashes
    :ivar content_hash:
    :vartype content_hash: str
    :ivar extra_hash:
    :vartype extra_hash: str
    :ivar extra_hashes:
    :vartype extra_hashes: ~flow.models.RegisterComponentMetaInfoExtraHashes
    :ivar registration:
    :vartype registration: bool
    :ivar validate_only:
    :vartype validate_only: bool
    :ivar skip_workspace_related_check:
    :vartype skip_workspace_related_check: bool
    :ivar intellectual_property_protected_workspace_component_registration_allowed_publisher:
    :vartype intellectual_property_protected_workspace_component_registration_allowed_publisher:
     list[str]
    :ivar system_managed_registration:
    :vartype system_managed_registration: bool
    :ivar allow_dup_name_between_input_and_ouput_port:
    :vartype allow_dup_name_between_input_and_ouput_port: bool
    :ivar module_source:
    :vartype module_source: str
    :ivar module_scope:
    :vartype module_scope: str
    :ivar
:ivar aml_module_name: :vartype aml_module_name: str :ivar name_only_display_info: :vartype name_only_display_info: str :ivar name: :vartype name: str :ivar version: :vartype version: str :ivar module_version_id: :vartype module_version_id: str :ivar snapshot_id: :vartype snapshot_id: str :ivar component_registration_type: Possible values include: "Normal", "AnonymousAmlModule", "AnonymousAmlModuleVersion", "ModuleEntityOnly". :vartype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum :ivar module_entity_from_yaml: :vartype module_entity_from_yaml: ~flow.models.ModuleEntity :ivar set_as_default_version: :vartype set_as_default_version: bool :ivar data_types_from_yaml: :vartype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo] :ivar data_type_mechanism: Possible values include: "ErrorWhenNotExisting", "RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly". :vartype data_type_mechanism: str or ~flow.models.DataTypeMechanism :ivar identifier_hash: :vartype identifier_hash: str :ivar identifier_hashes: :vartype identifier_hashes: ~flow.models.RegisterComponentMetaInfoIdentifierHashes :ivar content_hash: :vartype content_hash: str :ivar extra_hash: :vartype extra_hash: str :ivar extra_hashes: :vartype extra_hashes: ~flow.models.RegisterComponentMetaInfoExtraHashes :ivar registration: :vartype registration: bool :ivar validate_only: :vartype validate_only: bool :ivar skip_workspace_related_check: :vartype skip_workspace_related_check: bool :ivar intellectual_property_protected_workspace_component_registration_allowed_publisher: :vartype intellectual_property_protected_workspace_component_registration_allowed_publisher: list[str] :ivar system_managed_registration: :vartype system_managed_registration: bool :ivar allow_dup_name_between_input_and_ouput_port: :vartype allow_dup_name_between_input_and_ouput_port: bool :ivar module_source: :vartype module_source: str :ivar module_scope: :vartype module_scope: str :ivar 
module_additional_includes_count: :vartype module_additional_includes_count: int :ivar module_os_type: :vartype module_os_type: str :ivar module_codegen_by: :vartype module_codegen_by: str :ivar module_client_source: :vartype module_client_source: str :ivar module_is_builtin: :vartype module_is_builtin: bool :ivar module_register_event_extension_fields: Dictionary of :code:`<string>`. :vartype module_register_event_extension_fields: dict[str, str] """ _attribute_map = { 'aml_module_name': {'key': 'amlModuleName', 'type': 'str'}, 'name_only_display_info': {'key': 'nameOnlyDisplayInfo', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, 'module_version_id': {'key': 'moduleVersionId', 'type': 'str'}, 'snapshot_id': {'key': 'snapshotId', 'type': 'str'}, 'component_registration_type': {'key': 'componentRegistrationType', 'type': 'str'}, 'module_entity_from_yaml': {'key': 'moduleEntityFromYaml', 'type': 'ModuleEntity'}, 'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'}, 'data_types_from_yaml': {'key': 'dataTypesFromYaml', 'type': '[DataTypeCreationInfo]'}, 'data_type_mechanism': {'key': 'dataTypeMechanism', 'type': 'str'}, 'identifier_hash': {'key': 'identifierHash', 'type': 'str'}, 'identifier_hashes': {'key': 'identifierHashes', 'type': 'RegisterComponentMetaInfoIdentifierHashes'}, 'content_hash': {'key': 'contentHash', 'type': 'str'}, 'extra_hash': {'key': 'extraHash', 'type': 'str'}, 'extra_hashes': {'key': 'extraHashes', 'type': 'RegisterComponentMetaInfoExtraHashes'}, 'registration': {'key': 'registration', 'type': 'bool'}, 'validate_only': {'key': 'validateOnly', 'type': 'bool'}, 'skip_workspace_related_check': {'key': 'skipWorkspaceRelatedCheck', 'type': 'bool'}, 'intellectual_property_protected_workspace_component_registration_allowed_publisher': {'key': 'intellectualPropertyProtectedWorkspaceComponentRegistrationAllowedPublisher', 'type': '[str]'}, 'system_managed_registration': {'key': 
'systemManagedRegistration', 'type': 'bool'}, 'allow_dup_name_between_input_and_ouput_port': {'key': 'allowDupNameBetweenInputAndOuputPort', 'type': 'bool'}, 'module_source': {'key': 'moduleSource', 'type': 'str'}, 'module_scope': {'key': 'moduleScope', 'type': 'str'}, 'module_additional_includes_count': {'key': 'moduleAdditionalIncludesCount', 'type': 'int'}, 'module_os_type': {'key': 'moduleOSType', 'type': 'str'}, 'module_codegen_by': {'key': 'moduleCodegenBy', 'type': 'str'}, 'module_client_source': {'key': 'moduleClientSource', 'type': 'str'}, 'module_is_builtin': {'key': 'moduleIsBuiltin', 'type': 'bool'}, 'module_register_event_extension_fields': {'key': 'moduleRegisterEventExtensionFields', 'type': '{str}'}, } def __init__( self, **kwargs ): """ :keyword aml_module_name: :paramtype aml_module_name: str :keyword name_only_display_info: :paramtype name_only_display_info: str :keyword name: :paramtype name: str :keyword version: :paramtype version: str :keyword module_version_id: :paramtype module_version_id: str :keyword snapshot_id: :paramtype snapshot_id: str :keyword component_registration_type: Possible values include: "Normal", "AnonymousAmlModule", "AnonymousAmlModuleVersion", "ModuleEntityOnly". :paramtype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum :keyword module_entity_from_yaml: :paramtype module_entity_from_yaml: ~flow.models.ModuleEntity :keyword set_as_default_version: :paramtype set_as_default_version: bool :keyword data_types_from_yaml: :paramtype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo] :keyword data_type_mechanism: Possible values include: "ErrorWhenNotExisting", "RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly". 
:paramtype data_type_mechanism: str or ~flow.models.DataTypeMechanism :keyword identifier_hash: :paramtype identifier_hash: str :keyword identifier_hashes: :paramtype identifier_hashes: ~flow.models.RegisterComponentMetaInfoIdentifierHashes :keyword content_hash: :paramtype content_hash: str :keyword extra_hash: :paramtype extra_hash: str :keyword extra_hashes: :paramtype extra_hashes: ~flow.models.RegisterComponentMetaInfoExtraHashes :keyword registration: :paramtype registration: bool :keyword validate_only: :paramtype validate_only: bool :keyword skip_workspace_related_check: :paramtype skip_workspace_related_check: bool :keyword intellectual_property_protected_workspace_component_registration_allowed_publisher: :paramtype intellectual_property_protected_workspace_component_registration_allowed_publisher: list[str] :keyword system_managed_registration: :paramtype system_managed_registration: bool :keyword allow_dup_name_between_input_and_ouput_port: :paramtype allow_dup_name_between_input_and_ouput_port: bool :keyword module_source: :paramtype module_source: str :keyword module_scope: :paramtype module_scope: str :keyword module_additional_includes_count: :paramtype module_additional_includes_count: int :keyword module_os_type: :paramtype module_os_type: str :keyword module_codegen_by: :paramtype module_codegen_by: str :keyword module_client_source: :paramtype module_client_source: str :keyword module_is_builtin: :paramtype module_is_builtin: bool :keyword module_register_event_extension_fields: Dictionary of :code:`<string>`. 
:paramtype module_register_event_extension_fields: dict[str, str] """ super(RegisterComponentMetaInfo, self).__init__(**kwargs) self.aml_module_name = kwargs.get('aml_module_name', None) self.name_only_display_info = kwargs.get('name_only_display_info', None) self.name = kwargs.get('name', None) self.version = kwargs.get('version', None) self.module_version_id = kwargs.get('module_version_id', None) self.snapshot_id = kwargs.get('snapshot_id', None) self.component_registration_type = kwargs.get('component_registration_type', None) self.module_entity_from_yaml = kwargs.get('module_entity_from_yaml', None) self.set_as_default_version = kwargs.get('set_as_default_version', None) self.data_types_from_yaml = kwargs.get('data_types_from_yaml', None) self.data_type_mechanism = kwargs.get('data_type_mechanism', None) self.identifier_hash = kwargs.get('identifier_hash', None) self.identifier_hashes = kwargs.get('identifier_hashes', None) self.content_hash = kwargs.get('content_hash', None) self.extra_hash = kwargs.get('extra_hash', None) self.extra_hashes = kwargs.get('extra_hashes', None) self.registration = kwargs.get('registration', None) self.validate_only = kwargs.get('validate_only', None) self.skip_workspace_related_check = kwargs.get('skip_workspace_related_check', None) self.intellectual_property_protected_workspace_component_registration_allowed_publisher = kwargs.get('intellectual_property_protected_workspace_component_registration_allowed_publisher', None) self.system_managed_registration = kwargs.get('system_managed_registration', None) self.allow_dup_name_between_input_and_ouput_port = kwargs.get('allow_dup_name_between_input_and_ouput_port', None) self.module_source = kwargs.get('module_source', None) self.module_scope = kwargs.get('module_scope', None) self.module_additional_includes_count = kwargs.get('module_additional_includes_count', None) self.module_os_type = kwargs.get('module_os_type', None) self.module_codegen_by = kwargs.get('module_codegen_by', 
None) self.module_client_source = kwargs.get('module_client_source', None) self.module_is_builtin = kwargs.get('module_is_builtin', None) self.module_register_event_extension_fields = kwargs.get('module_register_event_extension_fields', None) class RegisterComponentMetaInfoExtraHashes(msrest.serialization.Model): """RegisterComponentMetaInfoExtraHashes. :ivar identifier_hash: :vartype identifier_hash: str :ivar identifier_hash_v2: :vartype identifier_hash_v2: str """ _attribute_map = { 'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'}, 'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword identifier_hash: :paramtype identifier_hash: str :keyword identifier_hash_v2: :paramtype identifier_hash_v2: str """ super(RegisterComponentMetaInfoExtraHashes, self).__init__(**kwargs) self.identifier_hash = kwargs.get('identifier_hash', None) self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None) class RegisterComponentMetaInfoIdentifierHashes(msrest.serialization.Model): """RegisterComponentMetaInfoIdentifierHashes. :ivar identifier_hash: :vartype identifier_hash: str :ivar identifier_hash_v2: :vartype identifier_hash_v2: str """ _attribute_map = { 'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'}, 'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword identifier_hash: :paramtype identifier_hash: str :keyword identifier_hash_v2: :paramtype identifier_hash_v2: str """ super(RegisterComponentMetaInfoIdentifierHashes, self).__init__(**kwargs) self.identifier_hash = kwargs.get('identifier_hash', None) self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None) class RegisteredDataSetReference(msrest.serialization.Model): """RegisteredDataSetReference. 
    :ivar id:
    :vartype id: str
    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        """
        super(RegisteredDataSetReference, self).__init__(**kwargs)
        self.id = kwargs.get('id', None)
        self.name = kwargs.get('name', None)
        self.version = kwargs.get('version', None)


# Registry-scoped variant of RegisterComponentMetaInfo: carries the same
# component fields plus registryName, publisher information and blob
# reference data.  Generated model — edits are lost on regeneration.
class RegisterRegistryComponentMetaInfo(msrest.serialization.Model):
    """RegisterRegistryComponentMetaInfo.

    :ivar registry_name:
    :vartype registry_name: str
    :ivar intellectual_property_publisher_information:
    :vartype intellectual_property_publisher_information:
     ~flow.models.IntellectualPropertyPublisherInformation
    :ivar blob_reference_data: This is a dictionary.
    :vartype blob_reference_data: dict[str, ~flow.models.RegistryBlobReferenceData]
    :ivar aml_module_name:
    :vartype aml_module_name: str
    :ivar name_only_display_info:
    :vartype name_only_display_info: str
    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    :ivar module_version_id:
    :vartype module_version_id: str
    :ivar snapshot_id:
    :vartype snapshot_id: str
    :ivar component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
     "AnonymousAmlModuleVersion", "ModuleEntityOnly".
    :vartype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
    :ivar module_entity_from_yaml:
    :vartype module_entity_from_yaml: ~flow.models.ModuleEntity
    :ivar set_as_default_version:
    :vartype set_as_default_version: bool
    :ivar data_types_from_yaml:
    :vartype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
    :ivar data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
     "RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
    :vartype data_type_mechanism: str or ~flow.models.DataTypeMechanism
    :ivar identifier_hash:
    :vartype identifier_hash: str
    :ivar identifier_hashes:
    :vartype identifier_hashes: ~flow.models.RegisterRegistryComponentMetaInfoIdentifierHashes
    :ivar content_hash:
    :vartype content_hash: str
    :ivar extra_hash:
    :vartype extra_hash: str
    :ivar extra_hashes:
    :vartype extra_hashes: ~flow.models.RegisterRegistryComponentMetaInfoExtraHashes
    :ivar registration:
    :vartype registration: bool
    :ivar validate_only:
    :vartype validate_only: bool
    :ivar skip_workspace_related_check:
    :vartype skip_workspace_related_check: bool
    :ivar intellectual_property_protected_workspace_component_registration_allowed_publisher:
    :vartype intellectual_property_protected_workspace_component_registration_allowed_publisher:
     list[str]
    :ivar system_managed_registration:
    :vartype system_managed_registration: bool
    :ivar allow_dup_name_between_input_and_ouput_port:
    :vartype allow_dup_name_between_input_and_ouput_port: bool
    :ivar module_source:
    :vartype module_source: str
    :ivar module_scope:
    :vartype module_scope: str
    :ivar module_additional_includes_count:
    :vartype module_additional_includes_count: int
    :ivar module_os_type:
    :vartype module_os_type: str
    :ivar module_codegen_by:
    :vartype module_codegen_by: str
    :ivar module_client_source:
    :vartype module_client_source: str
    :ivar module_is_builtin:
    :vartype module_is_builtin: bool
    :ivar module_register_event_extension_fields: Dictionary of :code:`<string>`.
    :vartype module_register_event_extension_fields: dict[str, str]
    """

    # NOTE(review): 'Ouput' (sic) in the wire key below is the service
    # contract's spelling; the Python attribute mirrors it on purpose.
    _attribute_map = {
        'registry_name': {'key': 'registryName', 'type': 'str'},
        'intellectual_property_publisher_information': {'key': 'intellectualPropertyPublisherInformation', 'type': 'IntellectualPropertyPublisherInformation'},
        'blob_reference_data': {'key': 'blobReferenceData', 'type': '{RegistryBlobReferenceData}'},
        'aml_module_name': {'key': 'amlModuleName', 'type': 'str'},
        'name_only_display_info': {'key': 'nameOnlyDisplayInfo', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
        'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
        'component_registration_type': {'key': 'componentRegistrationType', 'type': 'str'},
        'module_entity_from_yaml': {'key': 'moduleEntityFromYaml', 'type': 'ModuleEntity'},
        'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
        'data_types_from_yaml': {'key': 'dataTypesFromYaml', 'type': '[DataTypeCreationInfo]'},
        'data_type_mechanism': {'key': 'dataTypeMechanism', 'type': 'str'},
        'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
        'identifier_hashes': {'key': 'identifierHashes', 'type': 'RegisterRegistryComponentMetaInfoIdentifierHashes'},
        'content_hash': {'key': 'contentHash', 'type': 'str'},
        'extra_hash': {'key': 'extraHash', 'type': 'str'},
        'extra_hashes': {'key': 'extraHashes', 'type': 'RegisterRegistryComponentMetaInfoExtraHashes'},
        'registration': {'key': 'registration', 'type': 'bool'},
        'validate_only': {'key': 'validateOnly', 'type': 'bool'},
        'skip_workspace_related_check': {'key': 'skipWorkspaceRelatedCheck', 'type': 'bool'},
        'intellectual_property_protected_workspace_component_registration_allowed_publisher': {'key': 'intellectualPropertyProtectedWorkspaceComponentRegistrationAllowedPublisher', 'type': '[str]'},
        'system_managed_registration': {'key': 'systemManagedRegistration', 'type': 'bool'},
        'allow_dup_name_between_input_and_ouput_port': {'key': 'allowDupNameBetweenInputAndOuputPort', 'type': 'bool'},
        'module_source': {'key': 'moduleSource', 'type': 'str'},
        'module_scope': {'key': 'moduleScope', 'type': 'str'},
        'module_additional_includes_count': {'key': 'moduleAdditionalIncludesCount', 'type': 'int'},
        'module_os_type': {'key': 'moduleOSType', 'type': 'str'},
        'module_codegen_by': {'key': 'moduleCodegenBy', 'type': 'str'},
        'module_client_source': {'key': 'moduleClientSource', 'type': 'str'},
        'module_is_builtin': {'key': 'moduleIsBuiltin', 'type': 'bool'},
        'module_register_event_extension_fields': {'key': 'moduleRegisterEventExtensionFields', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword registry_name:
        :paramtype registry_name: str
        :keyword intellectual_property_publisher_information:
        :paramtype intellectual_property_publisher_information:
         ~flow.models.IntellectualPropertyPublisherInformation
        :keyword blob_reference_data: This is a dictionary.
        :paramtype blob_reference_data: dict[str, ~flow.models.RegistryBlobReferenceData]
        :keyword aml_module_name:
        :paramtype aml_module_name: str
        :keyword name_only_display_info:
        :paramtype name_only_display_info: str
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        :keyword module_version_id:
        :paramtype module_version_id: str
        :keyword snapshot_id:
        :paramtype snapshot_id: str
        :keyword component_registration_type: Possible values include: "Normal",
         "AnonymousAmlModule", "AnonymousAmlModuleVersion", "ModuleEntityOnly".
        :paramtype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
        :keyword module_entity_from_yaml:
        :paramtype module_entity_from_yaml: ~flow.models.ModuleEntity
        :keyword set_as_default_version:
        :paramtype set_as_default_version: bool
        :keyword data_types_from_yaml:
        :paramtype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
        :keyword data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
         "RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
        :paramtype data_type_mechanism: str or ~flow.models.DataTypeMechanism
        :keyword identifier_hash:
        :paramtype identifier_hash: str
        :keyword identifier_hashes:
        :paramtype identifier_hashes: ~flow.models.RegisterRegistryComponentMetaInfoIdentifierHashes
        :keyword content_hash:
        :paramtype content_hash: str
        :keyword extra_hash:
        :paramtype extra_hash: str
        :keyword extra_hashes:
        :paramtype extra_hashes: ~flow.models.RegisterRegistryComponentMetaInfoExtraHashes
        :keyword registration:
        :paramtype registration: bool
        :keyword validate_only:
        :paramtype validate_only: bool
        :keyword skip_workspace_related_check:
        :paramtype skip_workspace_related_check: bool
        :keyword intellectual_property_protected_workspace_component_registration_allowed_publisher:
        :paramtype intellectual_property_protected_workspace_component_registration_allowed_publisher:
         list[str]
        :keyword system_managed_registration:
        :paramtype system_managed_registration: bool
        :keyword allow_dup_name_between_input_and_ouput_port:
        :paramtype allow_dup_name_between_input_and_ouput_port: bool
        :keyword module_source:
        :paramtype module_source: str
        :keyword module_scope:
        :paramtype module_scope: str
        :keyword module_additional_includes_count:
        :paramtype module_additional_includes_count: int
        :keyword module_os_type:
        :paramtype module_os_type: str
        :keyword module_codegen_by:
        :paramtype module_codegen_by: str
        :keyword module_client_source:
        :paramtype module_client_source: str
        :keyword module_is_builtin:
        :paramtype module_is_builtin: bool
        :keyword module_register_event_extension_fields: Dictionary of :code:`<string>`.
        :paramtype module_register_event_extension_fields: dict[str, str]
        """
        super(RegisterRegistryComponentMetaInfo, self).__init__(**kwargs)
        # Every field is optional; absent kwargs default to None.
        self.registry_name = kwargs.get('registry_name', None)
        self.intellectual_property_publisher_information = kwargs.get('intellectual_property_publisher_information', None)
        self.blob_reference_data = kwargs.get('blob_reference_data', None)
        self.aml_module_name = kwargs.get('aml_module_name', None)
        self.name_only_display_info = kwargs.get('name_only_display_info', None)
        self.name = kwargs.get('name', None)
        self.version = kwargs.get('version', None)
        self.module_version_id = kwargs.get('module_version_id', None)
        self.snapshot_id = kwargs.get('snapshot_id', None)
        self.component_registration_type = kwargs.get('component_registration_type', None)
        self.module_entity_from_yaml = kwargs.get('module_entity_from_yaml', None)
        self.set_as_default_version = kwargs.get('set_as_default_version', None)
        self.data_types_from_yaml = kwargs.get('data_types_from_yaml', None)
        self.data_type_mechanism = kwargs.get('data_type_mechanism', None)
        self.identifier_hash = kwargs.get('identifier_hash', None)
        self.identifier_hashes = kwargs.get('identifier_hashes', None)
        self.content_hash = kwargs.get('content_hash', None)
        self.extra_hash = kwargs.get('extra_hash', None)
        self.extra_hashes = kwargs.get('extra_hashes', None)
        self.registration = kwargs.get('registration', None)
        self.validate_only = kwargs.get('validate_only', None)
        self.skip_workspace_related_check = kwargs.get('skip_workspace_related_check', None)
        self.intellectual_property_protected_workspace_component_registration_allowed_publisher = kwargs.get('intellectual_property_protected_workspace_component_registration_allowed_publisher', None)
        self.system_managed_registration = kwargs.get('system_managed_registration', None)
        self.allow_dup_name_between_input_and_ouput_port = kwargs.get('allow_dup_name_between_input_and_ouput_port', None)
        self.module_source = kwargs.get('module_source', None)
        self.module_scope = kwargs.get('module_scope', None)
        self.module_additional_includes_count = kwargs.get('module_additional_includes_count', None)
        self.module_os_type = kwargs.get('module_os_type', None)
        self.module_codegen_by = kwargs.get('module_codegen_by', None)
        self.module_client_source = kwargs.get('module_client_source', None)
        self.module_is_builtin = kwargs.get('module_is_builtin', None)
        self.module_register_event_extension_fields = kwargs.get('module_register_event_extension_fields', None)


class RegisterRegistryComponentMetaInfoExtraHashes(msrest.serialization.Model):
    """RegisterRegistryComponentMetaInfoExtraHashes.

    :ivar identifier_hash:
    :vartype identifier_hash: str
    :ivar identifier_hash_v2:
    :vartype identifier_hash_v2: str
    """

    # PascalCase wire keys, same as the non-registry *ExtraHashes model.
    _attribute_map = {
        'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
        'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword identifier_hash:
        :paramtype identifier_hash: str
        :keyword identifier_hash_v2:
        :paramtype identifier_hash_v2: str
        """
        super(RegisterRegistryComponentMetaInfoExtraHashes, self).__init__(**kwargs)
        self.identifier_hash = kwargs.get('identifier_hash', None)
        self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)


class RegisterRegistryComponentMetaInfoIdentifierHashes(msrest.serialization.Model):
    """RegisterRegistryComponentMetaInfoIdentifierHashes.
    :ivar identifier_hash:
    :vartype identifier_hash: str
    :ivar identifier_hash_v2:
    :vartype identifier_hash_v2: str
    """

    # PascalCase wire keys — matches the sibling *IdentifierHashes models.
    _attribute_map = {
        'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
        'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword identifier_hash:
        :paramtype identifier_hash: str
        :keyword identifier_hash_v2:
        :paramtype identifier_hash_v2: str
        """
        super(RegisterRegistryComponentMetaInfoIdentifierHashes, self).__init__(**kwargs)
        self.identifier_hash = kwargs.get('identifier_hash', None)
        self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)


class RegistrationOptions(msrest.serialization.Model):
    """RegistrationOptions.

    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar properties: Dictionary of :code:`<string>`.
    :vartype properties: dict[str, str]
    :ivar dataset_registration_options:
    :vartype dataset_registration_options: ~flow.models.DatasetRegistrationOptions
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'dataset_registration_options': {'key': 'datasetRegistrationOptions', 'type': 'DatasetRegistrationOptions'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword properties: Dictionary of :code:`<string>`.
        :paramtype properties: dict[str, str]
        :keyword dataset_registration_options:
        :paramtype dataset_registration_options: ~flow.models.DatasetRegistrationOptions
        """
        super(RegistrationOptions, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.version = kwargs.get('version', None)
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)
        self.dataset_registration_options = kwargs.get('dataset_registration_options', None)


class RegistryBlobReferenceData(msrest.serialization.Model):
    """RegistryBlobReferenceData.

    :ivar data_reference_id:
    :vartype data_reference_id: str
    :ivar data:
    :vartype data: str
    """

    _attribute_map = {
        'data_reference_id': {'key': 'dataReferenceId', 'type': 'str'},
        'data': {'key': 'data', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_reference_id:
        :paramtype data_reference_id: str
        :keyword data:
        :paramtype data: str
        """
        super(RegistryBlobReferenceData, self).__init__(**kwargs)
        self.data_reference_id = kwargs.get('data_reference_id', None)
        self.data = kwargs.get('data', None)


class RegistryIdentity(msrest.serialization.Model):
    """RegistryIdentity.

    :ivar resource_id:
    :vartype resource_id: str
    :ivar client_id:
    :vartype client_id: str
    """

    _attribute_map = {
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'client_id': {'key': 'clientId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword resource_id:
        :paramtype resource_id: str
        :keyword client_id:
        :paramtype client_id: str
        """
        super(RegistryIdentity, self).__init__(**kwargs)
        self.resource_id = kwargs.get('resource_id', None)
        self.client_id = kwargs.get('client_id', None)


class Relationship(msrest.serialization.Model):
    """Relationship.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar relation_type:
    :vartype relation_type: str
    :ivar target_entity_id:
    :vartype target_entity_id: str
    :ivar asset_id:
    :vartype asset_id: str
    :ivar entity_type:
    :vartype entity_type: str
    :ivar direction:
    :vartype direction: str
    :ivar entity_container_id:
    :vartype entity_container_id: str
    """

    # 'entity_type' and 'entity_container_id' are server-populated: marked
    # readonly here and forced to None in __init__ below, so they are never
    # accepted from the caller.
    _validation = {
        'entity_type': {'readonly': True},
        'entity_container_id': {'readonly': True},
    }

    _attribute_map = {
        'relation_type': {'key': 'relationType', 'type': 'str'},
        'target_entity_id': {'key': 'targetEntityId', 'type': 'str'},
        'asset_id': {'key': 'assetId', 'type': 'str'},
        'entity_type': {'key': 'entityType', 'type': 'str'},
        'direction': {'key': 'direction', 'type': 'str'},
        'entity_container_id': {'key': 'entityContainerId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword relation_type:
        :paramtype relation_type: str
        :keyword target_entity_id:
        :paramtype target_entity_id: str
        :keyword asset_id:
        :paramtype asset_id: str
        :keyword direction:
        :paramtype direction: str
        """
        super(Relationship, self).__init__(**kwargs)
        self.relation_type = kwargs.get('relation_type', None)
        self.target_entity_id = kwargs.get('target_entity_id', None)
        self.asset_id = kwargs.get('asset_id', None)
        self.entity_type = None
        self.direction = kwargs.get('direction', None)
        self.entity_container_id = None


class RemoteDockerComputeInfo(msrest.serialization.Model):
    """RemoteDockerComputeInfo.

    :ivar address:
    :vartype address: str
    :ivar username:
    :vartype username: str
    :ivar password:
    :vartype password: str
    :ivar private_key:
    :vartype private_key: str
    """

    # NOTE(review): 'password' and 'private_key' are credentials carried in
    # plain model fields — take care not to log instances of this model.
    _attribute_map = {
        'address': {'key': 'address', 'type': 'str'},
        'username': {'key': 'username', 'type': 'str'},
        'password': {'key': 'password', 'type': 'str'},
        'private_key': {'key': 'privateKey', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword address:
        :paramtype address: str
        :keyword username:
        :paramtype username: str
        :keyword password:
        :paramtype password: str
        :keyword private_key:
        :paramtype private_key: str
        """
        super(RemoteDockerComputeInfo, self).__init__(**kwargs)
        self.address = kwargs.get('address', None)
        self.username = kwargs.get('username', None)
        self.password = kwargs.get('password', None)
        self.private_key = kwargs.get('private_key', None)


class ResourceConfig(msrest.serialization.Model):
    """ResourceConfig.

    :ivar gpu_count:
    :vartype gpu_count: int
    :ivar cpu_count:
    :vartype cpu_count: int
    :ivar memory_request_in_gb:
    :vartype memory_request_in_gb: int
    """

    _attribute_map = {
        'gpu_count': {'key': 'gpuCount', 'type': 'int'},
        'cpu_count': {'key': 'cpuCount', 'type': 'int'},
        'memory_request_in_gb': {'key': 'memoryRequestInGB', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword gpu_count:
        :paramtype gpu_count: int
        :keyword cpu_count:
        :paramtype cpu_count: int
        :keyword memory_request_in_gb:
        :paramtype memory_request_in_gb: int
        """
        super(ResourceConfig, self).__init__(**kwargs)
        self.gpu_count = kwargs.get('gpu_count', None)
        self.cpu_count = kwargs.get('cpu_count', None)
        self.memory_request_in_gb = kwargs.get('memory_request_in_gb', None)


# Same shape as ResourceConfig; both exist because the service contract
# declares two distinct schemas with identical fields.
class ResourceConfiguration(msrest.serialization.Model):
    """ResourceConfiguration.

    :ivar gpu_count:
    :vartype gpu_count: int
    :ivar cpu_count:
    :vartype cpu_count: int
    :ivar memory_request_in_gb:
    :vartype memory_request_in_gb: int
    """

    _attribute_map = {
        'gpu_count': {'key': 'gpuCount', 'type': 'int'},
        'cpu_count': {'key': 'cpuCount', 'type': 'int'},
        'memory_request_in_gb': {'key': 'memoryRequestInGB', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword gpu_count:
        :paramtype gpu_count: int
        :keyword cpu_count:
        :paramtype cpu_count: int
        :keyword memory_request_in_gb:
        :paramtype memory_request_in_gb: int
        """
        super(ResourceConfiguration, self).__init__(**kwargs)
        self.gpu_count = kwargs.get('gpu_count', None)
        self.cpu_count = kwargs.get('cpu_count', None)
        self.memory_request_in_gb = kwargs.get('memory_request_in_gb', None)


class ResourcesSetting(msrest.serialization.Model):
    """ResourcesSetting.

    :ivar instance_size:
    :vartype instance_size: str
    :ivar spark_version:
    :vartype spark_version: str
    """

    _attribute_map = {
        'instance_size': {'key': 'instanceSize', 'type': 'str'},
        'spark_version': {'key': 'sparkVersion', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword instance_size:
        :paramtype instance_size: str
        :keyword spark_version:
        :paramtype spark_version: str
        """
        super(ResourcesSetting, self).__init__(**kwargs)
        self.instance_size = kwargs.get('instance_size', None)
        self.spark_version = kwargs.get('spark_version', None)


class RetrieveToolFuncResultRequest(msrest.serialization.Model):
    """RetrieveToolFuncResultRequest.

    :ivar func_path:
    :vartype func_path: str
    :ivar func_kwargs: This is a dictionary.
    :vartype func_kwargs: dict[str, any]
    :ivar func_call_scenario: Possible values include: "generated_by", "reverse_generated_by",
     "dynamic_list".
    :vartype func_call_scenario: str or ~flow.models.ToolFuncCallScenario
    """

    # Wire keys here are snake_case (matching the Python names), unlike the
    # camelCase keys used by most models in this file.
    _attribute_map = {
        'func_path': {'key': 'func_path', 'type': 'str'},
        'func_kwargs': {'key': 'func_kwargs', 'type': '{object}'},
        'func_call_scenario': {'key': 'func_call_scenario', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword func_path:
        :paramtype func_path: str
        :keyword func_kwargs: This is a dictionary.
        :paramtype func_kwargs: dict[str, any]
        :keyword func_call_scenario: Possible values include: "generated_by",
         "reverse_generated_by", "dynamic_list".
        :paramtype func_call_scenario: str or ~flow.models.ToolFuncCallScenario
        """
        super(RetrieveToolFuncResultRequest, self).__init__(**kwargs)
        self.func_path = kwargs.get('func_path', None)
        self.func_kwargs = kwargs.get('func_kwargs', None)
        self.func_call_scenario = kwargs.get('func_call_scenario', None)


class RetryConfiguration(msrest.serialization.Model):
    """RetryConfiguration.

    :ivar max_retry_count:
    :vartype max_retry_count: int
    """

    _attribute_map = {
        'max_retry_count': {'key': 'maxRetryCount', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword max_retry_count:
        :paramtype max_retry_count: int
        """
        super(RetryConfiguration, self).__init__(**kwargs)
        self.max_retry_count = kwargs.get('max_retry_count', None)


class RGitHubPackage(msrest.serialization.Model):
    """RGitHubPackage.

    :ivar repository:
    :vartype repository: str
    :ivar auth_token:
    :vartype auth_token: str
    """

    # NOTE(review): 'auth_token' is a credential — avoid logging this model.
    _attribute_map = {
        'repository': {'key': 'repository', 'type': 'str'},
        'auth_token': {'key': 'authToken', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword repository:
        :paramtype repository: str
        :keyword auth_token:
        :paramtype auth_token: str
        """
        super(RGitHubPackage, self).__init__(**kwargs)
        self.repository = kwargs.get('repository', None)
        self.auth_token = kwargs.get('auth_token', None)


class RootError(msrest.serialization.Model):
    """The root error.

    :ivar code: The service-defined error code. Supported error codes: ServiceError, UserError,
     ValidationError, AzureStorageError, TransientError, RequestThrottled.
    :vartype code: str
    :ivar severity: The Severity of error.
    :vartype severity: int
    :ivar message: A human-readable representation of the error.
    :vartype message: str
    :ivar message_format: An unformatted version of the message with no variable substitution.
    :vartype message_format: str
    :ivar message_parameters: Value substitutions corresponding to the contents of MessageFormat.
    :vartype message_parameters: dict[str, str]
    :ivar reference_code: This code can optionally be set by the system generating the error.
     It should be used to classify the problem and identify the module and code area where the
     failure occurred.
    :vartype reference_code: str
    :ivar details_uri: A URI which points to more details about the context of the error.
    :vartype details_uri: str
    :ivar target: The target of the error (e.g., the name of the property in error).
    :vartype target: str
    :ivar details: The related errors that occurred during the request.
    :vartype details: list[~flow.models.RootError]
    :ivar inner_error: A nested structure of errors.
    :vartype inner_error: ~flow.models.InnerErrorResponse
    :ivar additional_info: The error additional info.
    :vartype additional_info: list[~flow.models.ErrorAdditionalInfo]
    """

    # 'details' nests RootError recursively; 'inner_error' is a separate
    # nested error structure.
    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'severity': {'key': 'severity', 'type': 'int'},
        'message': {'key': 'message', 'type': 'str'},
        'message_format': {'key': 'messageFormat', 'type': 'str'},
        'message_parameters': {'key': 'messageParameters', 'type': '{str}'},
        'reference_code': {'key': 'referenceCode', 'type': 'str'},
        'details_uri': {'key': 'detailsUri', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'details': {'key': 'details', 'type': '[RootError]'},
        'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
        'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword code: The service-defined error code. Supported error codes: ServiceError,
         UserError, ValidationError, AzureStorageError, TransientError, RequestThrottled.
        :paramtype code: str
        :keyword severity: The Severity of error.
        :paramtype severity: int
        :keyword message: A human-readable representation of the error.
        :paramtype message: str
        :keyword message_format: An unformatted version of the message with no variable
         substitution.
        :paramtype message_format: str
        :keyword message_parameters: Value substitutions corresponding to the contents of
         MessageFormat.
        :paramtype message_parameters: dict[str, str]
        :keyword reference_code: This code can optionally be set by the system generating the
         error. It should be used to classify the problem and identify the module and code area
         where the failure occurred.
        :paramtype reference_code: str
        :keyword details_uri: A URI which points to more details about the context of the error.
        :paramtype details_uri: str
        :keyword target: The target of the error (e.g., the name of the property in error).
        :paramtype target: str
        :keyword details: The related errors that occurred during the request.
        :paramtype details: list[~flow.models.RootError]
        :keyword inner_error: A nested structure of errors.
        :paramtype inner_error: ~flow.models.InnerErrorResponse
        :keyword additional_info: The error additional info.
        :paramtype additional_info: list[~flow.models.ErrorAdditionalInfo]
        """
        super(RootError, self).__init__(**kwargs)
        self.code = kwargs.get('code', None)
        self.severity = kwargs.get('severity', None)
        self.message = kwargs.get('message', None)
        self.message_format = kwargs.get('message_format', None)
        self.message_parameters = kwargs.get('message_parameters', None)
        self.reference_code = kwargs.get('reference_code', None)
        self.details_uri = kwargs.get('details_uri', None)
        self.target = kwargs.get('target', None)
        self.details = kwargs.get('details', None)
        self.inner_error = kwargs.get('inner_error', None)
        self.additional_info = kwargs.get('additional_info', None)


class RSection(msrest.serialization.Model):
    """RSection.

    :ivar r_version:
    :vartype r_version: str
    :ivar user_managed:
    :vartype user_managed: bool
    :ivar rscript_path:
    :vartype rscript_path: str
    :ivar snapshot_date:
    :vartype snapshot_date: str
    :ivar cran_packages:
    :vartype cran_packages: list[~flow.models.RCranPackage]
    :ivar git_hub_packages:
    :vartype git_hub_packages: list[~flow.models.RGitHubPackage]
    :ivar custom_url_packages:
    :vartype custom_url_packages: list[str]
    :ivar bio_conductor_packages:
    :vartype bio_conductor_packages: list[str]
    """

    _attribute_map = {
        'r_version': {'key': 'rVersion', 'type': 'str'},
        'user_managed': {'key': 'userManaged', 'type': 'bool'},
        'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
        'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
        'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
        'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackage]'},
        'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
        'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword r_version:
        :paramtype r_version: str
        :keyword user_managed:
        :paramtype user_managed: bool
        :keyword
rscript_path:
        :paramtype rscript_path: str
        :keyword snapshot_date:
        :paramtype snapshot_date: str
        :keyword cran_packages:
        :paramtype cran_packages: list[~flow.models.RCranPackage]
        :keyword git_hub_packages:
        :paramtype git_hub_packages: list[~flow.models.RGitHubPackage]
        :keyword custom_url_packages:
        :paramtype custom_url_packages: list[str]
        :keyword bio_conductor_packages:
        :paramtype bio_conductor_packages: list[str]
        """
        super(RSection, self).__init__(**kwargs)
        # All fields optional; absent kwargs default to None.
        self.r_version = kwargs.get('r_version', None)
        self.user_managed = kwargs.get('user_managed', None)
        self.rscript_path = kwargs.get('rscript_path', None)
        self.snapshot_date = kwargs.get('snapshot_date', None)
        self.cran_packages = kwargs.get('cran_packages', None)
        self.git_hub_packages = kwargs.get('git_hub_packages', None)
        self.custom_url_packages = kwargs.get('custom_url_packages', None)
        self.bio_conductor_packages = kwargs.get('bio_conductor_packages', None)


class RunAnnotations(msrest.serialization.Model):
    """RunAnnotations.

    :ivar display_name:
    :vartype display_name: str
    :ivar status:
    :vartype status: str
    :ivar primary_metric_name:
    :vartype primary_metric_name: str
    :ivar estimated_cost:
    :vartype estimated_cost: float
    :ivar primary_metric_summary:
    :vartype primary_metric_summary: ~flow.models.RunIndexMetricSummary
    :ivar metrics: Dictionary of :code:`<RunIndexMetricSummarySystemObject>`.
    :vartype metrics: dict[str, ~flow.models.RunIndexMetricSummarySystemObject]
    :ivar parameters: Dictionary of :code:`<any>`.
    :vartype parameters: dict[str, any]
    :ivar settings: Dictionary of :code:`<string>`.
    :vartype settings: dict[str, str]
    :ivar modified_time:
    :vartype modified_time: ~datetime.datetime
    :ivar retain_for_lifetime_of_workspace:
    :vartype retain_for_lifetime_of_workspace: bool
    :ivar error:
    :vartype error: ~flow.models.IndexedErrorResponse
    :ivar resource_metric_summary:
    :vartype resource_metric_summary: ~flow.models.RunIndexResourceMetricSummary
    :ivar job_cost:
    :vartype job_cost: ~flow.models.JobCost
    :ivar compute_duration:
    :vartype compute_duration: str
    :ivar compute_duration_milliseconds:
    :vartype compute_duration_milliseconds: float
    :ivar effective_start_time_utc:
    :vartype effective_start_time_utc: ~datetime.datetime
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar archived:
    :vartype archived: bool
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    """

    # Datetime fields use the msrest 'iso-8601' (de)serializer.
    _attribute_map = {
        'display_name': {'key': 'displayName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
        'estimated_cost': {'key': 'estimatedCost', 'type': 'float'},
        'primary_metric_summary': {'key': 'primaryMetricSummary', 'type': 'RunIndexMetricSummary'},
        'metrics': {'key': 'metrics', 'type': '{RunIndexMetricSummarySystemObject}'},
        'parameters': {'key': 'parameters', 'type': '{object}'},
        'settings': {'key': 'settings', 'type': '{str}'},
        'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
        'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
        'error': {'key': 'error', 'type': 'IndexedErrorResponse'},
        'resource_metric_summary': {'key': 'resourceMetricSummary', 'type': 'RunIndexResourceMetricSummary'},
        'job_cost': {'key': 'jobCost', 'type': 'JobCost'},
        'compute_duration': {'key': 'computeDuration', 'type': 'str'},
        'compute_duration_milliseconds': {'key': 'computeDurationMilliseconds', 'type': 'float'},
        'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'archived': {'key': 'archived', 'type': 'bool'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword display_name:
        :paramtype display_name: str
        :keyword status:
        :paramtype status: str
        :keyword primary_metric_name:
        :paramtype primary_metric_name: str
        :keyword estimated_cost:
        :paramtype estimated_cost: float
        :keyword primary_metric_summary:
        :paramtype primary_metric_summary: ~flow.models.RunIndexMetricSummary
        :keyword metrics: Dictionary of :code:`<RunIndexMetricSummarySystemObject>`.
        :paramtype metrics: dict[str, ~flow.models.RunIndexMetricSummarySystemObject]
        :keyword parameters: Dictionary of :code:`<any>`.
        :paramtype parameters: dict[str, any]
        :keyword settings: Dictionary of :code:`<string>`.
        :paramtype settings: dict[str, str]
        :keyword modified_time:
        :paramtype modified_time: ~datetime.datetime
        :keyword retain_for_lifetime_of_workspace:
        :paramtype retain_for_lifetime_of_workspace: bool
        :keyword error:
        :paramtype error: ~flow.models.IndexedErrorResponse
        :keyword resource_metric_summary:
        :paramtype resource_metric_summary: ~flow.models.RunIndexResourceMetricSummary
        :keyword job_cost:
        :paramtype job_cost: ~flow.models.JobCost
        :keyword compute_duration:
        :paramtype compute_duration: str
        :keyword compute_duration_milliseconds:
        :paramtype compute_duration_milliseconds: float
        :keyword effective_start_time_utc:
        :paramtype effective_start_time_utc: ~datetime.datetime
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword archived:
        :paramtype archived: bool
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        """
        super(RunAnnotations, self).__init__(**kwargs)
        self.display_name = kwargs.get('display_name', None)
        self.status = kwargs.get('status', None)
        self.primary_metric_name = kwargs.get('primary_metric_name', None)
        self.estimated_cost = kwargs.get('estimated_cost', None)
        self.primary_metric_summary = kwargs.get('primary_metric_summary', None)
        self.metrics = kwargs.get('metrics', None)
        self.parameters = kwargs.get('parameters', None)
        self.settings = kwargs.get('settings', None)
        self.modified_time = kwargs.get('modified_time', None)
        self.retain_for_lifetime_of_workspace = kwargs.get('retain_for_lifetime_of_workspace', None)
        self.error = kwargs.get('error', None)
        self.resource_metric_summary = kwargs.get('resource_metric_summary', None)
        self.job_cost = kwargs.get('job_cost', None)
        self.compute_duration = kwargs.get('compute_duration', None)
        self.compute_duration_milliseconds = kwargs.get('compute_duration_milliseconds', None)
        self.effective_start_time_utc = kwargs.get('effective_start_time_utc', None)
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.archived = kwargs.get('archived', None)
        self.tags = kwargs.get('tags', None)


class RunConfiguration(msrest.serialization.Model):
    """RunConfiguration.

    :ivar script:
    :vartype script: str
    :ivar script_type: Possible values include: "Python", "Notebook".
    :vartype script_type: str or ~flow.models.ScriptType
    :ivar command:
    :vartype command: str
    :ivar use_absolute_path:
    :vartype use_absolute_path: bool
    :ivar arguments:
    :vartype arguments: list[str]
    :ivar framework: Possible values include: "Python", "PySpark", "Cntk", "TensorFlow",
     "PyTorch", "PySparkInteractive", "R".
    :vartype framework: str or ~flow.models.Framework
    :ivar communicator: Possible values include: "None", "ParameterServer", "Gloo", "Mpi", "Nccl",
     "ParallelTask".
:vartype communicator: str or ~flow.models.Communicator :ivar target: :vartype target: str :ivar auto_cluster_compute_specification: :vartype auto_cluster_compute_specification: ~flow.models.AutoClusterComputeSpecification :ivar data_references: Dictionary of :code:`<DataReferenceConfiguration>`. :vartype data_references: dict[str, ~flow.models.DataReferenceConfiguration] :ivar data: Dictionary of :code:`<Data>`. :vartype data: dict[str, ~flow.models.Data] :ivar input_assets: Dictionary of :code:`<InputAsset>`. :vartype input_assets: dict[str, ~flow.models.InputAsset] :ivar output_data: Dictionary of :code:`<OutputData>`. :vartype output_data: dict[str, ~flow.models.OutputData] :ivar datacaches: :vartype datacaches: list[~flow.models.DatacacheConfiguration] :ivar job_name: :vartype job_name: str :ivar max_run_duration_seconds: :vartype max_run_duration_seconds: long :ivar node_count: :vartype node_count: int :ivar max_node_count: :vartype max_node_count: int :ivar instance_types: :vartype instance_types: list[str] :ivar priority: :vartype priority: int :ivar credential_passthrough: :vartype credential_passthrough: bool :ivar identity: :vartype identity: ~flow.models.IdentityConfiguration :ivar environment: :vartype environment: ~flow.models.EnvironmentDefinition :ivar history: :vartype history: ~flow.models.HistoryConfiguration :ivar spark: :vartype spark: ~flow.models.SparkConfiguration :ivar parallel_task: :vartype parallel_task: ~flow.models.ParallelTaskConfiguration :ivar tensorflow: :vartype tensorflow: ~flow.models.TensorflowConfiguration :ivar mpi: :vartype mpi: ~flow.models.MpiConfiguration :ivar py_torch: :vartype py_torch: ~flow.models.PyTorchConfiguration :ivar ray: :vartype ray: ~flow.models.RayConfiguration :ivar hdi: :vartype hdi: ~flow.models.HdiConfiguration :ivar docker: :vartype docker: ~flow.models.DockerConfiguration :ivar command_return_code_config: :vartype command_return_code_config: ~flow.models.CommandReturnCodeConfig :ivar 
environment_variables: Dictionary of :code:`<string>`. :vartype environment_variables: dict[str, str] :ivar application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`. :vartype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration] :ivar parameters: :vartype parameters: list[~flow.models.ParameterDefinition] :ivar autologger_settings: :vartype autologger_settings: ~flow.models.AutologgerSettings :ivar data_bricks: :vartype data_bricks: ~flow.models.DatabricksConfiguration :ivar training_diagnostic_config: :vartype training_diagnostic_config: ~flow.models.TrainingDiagnosticConfiguration :ivar secrets_configuration: Dictionary of :code:`<SecretConfiguration>`. :vartype secrets_configuration: dict[str, ~flow.models.SecretConfiguration] """ _attribute_map = { 'script': {'key': 'script', 'type': 'str'}, 'script_type': {'key': 'scriptType', 'type': 'str'}, 'command': {'key': 'command', 'type': 'str'}, 'use_absolute_path': {'key': 'useAbsolutePath', 'type': 'bool'}, 'arguments': {'key': 'arguments', 'type': '[str]'}, 'framework': {'key': 'framework', 'type': 'str'}, 'communicator': {'key': 'communicator', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, 'auto_cluster_compute_specification': {'key': 'autoClusterComputeSpecification', 'type': 'AutoClusterComputeSpecification'}, 'data_references': {'key': 'dataReferences', 'type': '{DataReferenceConfiguration}'}, 'data': {'key': 'data', 'type': '{Data}'}, 'input_assets': {'key': 'inputAssets', 'type': '{InputAsset}'}, 'output_data': {'key': 'outputData', 'type': '{OutputData}'}, 'datacaches': {'key': 'datacaches', 'type': '[DatacacheConfiguration]'}, 'job_name': {'key': 'jobName', 'type': 'str'}, 'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'long'}, 'node_count': {'key': 'nodeCount', 'type': 'int'}, 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, 'instance_types': {'key': 'instanceTypes', 'type': '[str]'}, 'priority': {'key': 
'priority', 'type': 'int'}, 'credential_passthrough': {'key': 'credentialPassthrough', 'type': 'bool'}, 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, 'environment': {'key': 'environment', 'type': 'EnvironmentDefinition'}, 'history': {'key': 'history', 'type': 'HistoryConfiguration'}, 'spark': {'key': 'spark', 'type': 'SparkConfiguration'}, 'parallel_task': {'key': 'parallelTask', 'type': 'ParallelTaskConfiguration'}, 'tensorflow': {'key': 'tensorflow', 'type': 'TensorflowConfiguration'}, 'mpi': {'key': 'mpi', 'type': 'MpiConfiguration'}, 'py_torch': {'key': 'pyTorch', 'type': 'PyTorchConfiguration'}, 'ray': {'key': 'ray', 'type': 'RayConfiguration'}, 'hdi': {'key': 'hdi', 'type': 'HdiConfiguration'}, 'docker': {'key': 'docker', 'type': 'DockerConfiguration'}, 'command_return_code_config': {'key': 'commandReturnCodeConfig', 'type': 'CommandReturnCodeConfig'}, 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, 'application_endpoints': {'key': 'applicationEndpoints', 'type': '{ApplicationEndpointConfiguration}'}, 'parameters': {'key': 'parameters', 'type': '[ParameterDefinition]'}, 'autologger_settings': {'key': 'autologgerSettings', 'type': 'AutologgerSettings'}, 'data_bricks': {'key': 'dataBricks', 'type': 'DatabricksConfiguration'}, 'training_diagnostic_config': {'key': 'trainingDiagnosticConfig', 'type': 'TrainingDiagnosticConfiguration'}, 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, } def __init__( self, **kwargs ): """ :keyword script: :paramtype script: str :keyword script_type: Possible values include: "Python", "Notebook". :paramtype script_type: str or ~flow.models.ScriptType :keyword command: :paramtype command: str :keyword use_absolute_path: :paramtype use_absolute_path: bool :keyword arguments: :paramtype arguments: list[str] :keyword framework: Possible values include: "Python", "PySpark", "Cntk", "TensorFlow", "PyTorch", "PySparkInteractive", "R". 
:paramtype framework: str or ~flow.models.Framework :keyword communicator: Possible values include: "None", "ParameterServer", "Gloo", "Mpi", "Nccl", "ParallelTask". :paramtype communicator: str or ~flow.models.Communicator :keyword target: :paramtype target: str :keyword auto_cluster_compute_specification: :paramtype auto_cluster_compute_specification: ~flow.models.AutoClusterComputeSpecification :keyword data_references: Dictionary of :code:`<DataReferenceConfiguration>`. :paramtype data_references: dict[str, ~flow.models.DataReferenceConfiguration] :keyword data: Dictionary of :code:`<Data>`. :paramtype data: dict[str, ~flow.models.Data] :keyword input_assets: Dictionary of :code:`<InputAsset>`. :paramtype input_assets: dict[str, ~flow.models.InputAsset] :keyword output_data: Dictionary of :code:`<OutputData>`. :paramtype output_data: dict[str, ~flow.models.OutputData] :keyword datacaches: :paramtype datacaches: list[~flow.models.DatacacheConfiguration] :keyword job_name: :paramtype job_name: str :keyword max_run_duration_seconds: :paramtype max_run_duration_seconds: long :keyword node_count: :paramtype node_count: int :keyword max_node_count: :paramtype max_node_count: int :keyword instance_types: :paramtype instance_types: list[str] :keyword priority: :paramtype priority: int :keyword credential_passthrough: :paramtype credential_passthrough: bool :keyword identity: :paramtype identity: ~flow.models.IdentityConfiguration :keyword environment: :paramtype environment: ~flow.models.EnvironmentDefinition :keyword history: :paramtype history: ~flow.models.HistoryConfiguration :keyword spark: :paramtype spark: ~flow.models.SparkConfiguration :keyword parallel_task: :paramtype parallel_task: ~flow.models.ParallelTaskConfiguration :keyword tensorflow: :paramtype tensorflow: ~flow.models.TensorflowConfiguration :keyword mpi: :paramtype mpi: ~flow.models.MpiConfiguration :keyword py_torch: :paramtype py_torch: ~flow.models.PyTorchConfiguration :keyword ray: :paramtype 
ray: ~flow.models.RayConfiguration :keyword hdi: :paramtype hdi: ~flow.models.HdiConfiguration :keyword docker: :paramtype docker: ~flow.models.DockerConfiguration :keyword command_return_code_config: :paramtype command_return_code_config: ~flow.models.CommandReturnCodeConfig :keyword environment_variables: Dictionary of :code:`<string>`. :paramtype environment_variables: dict[str, str] :keyword application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`. :paramtype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration] :keyword parameters: :paramtype parameters: list[~flow.models.ParameterDefinition] :keyword autologger_settings: :paramtype autologger_settings: ~flow.models.AutologgerSettings :keyword data_bricks: :paramtype data_bricks: ~flow.models.DatabricksConfiguration :keyword training_diagnostic_config: :paramtype training_diagnostic_config: ~flow.models.TrainingDiagnosticConfiguration :keyword secrets_configuration: Dictionary of :code:`<SecretConfiguration>`. 
:paramtype secrets_configuration: dict[str, ~flow.models.SecretConfiguration] """ super(RunConfiguration, self).__init__(**kwargs) self.script = kwargs.get('script', None) self.script_type = kwargs.get('script_type', None) self.command = kwargs.get('command', None) self.use_absolute_path = kwargs.get('use_absolute_path', None) self.arguments = kwargs.get('arguments', None) self.framework = kwargs.get('framework', None) self.communicator = kwargs.get('communicator', None) self.target = kwargs.get('target', None) self.auto_cluster_compute_specification = kwargs.get('auto_cluster_compute_specification', None) self.data_references = kwargs.get('data_references', None) self.data = kwargs.get('data', None) self.input_assets = kwargs.get('input_assets', None) self.output_data = kwargs.get('output_data', None) self.datacaches = kwargs.get('datacaches', None) self.job_name = kwargs.get('job_name', None) self.max_run_duration_seconds = kwargs.get('max_run_duration_seconds', None) self.node_count = kwargs.get('node_count', None) self.max_node_count = kwargs.get('max_node_count', None) self.instance_types = kwargs.get('instance_types', None) self.priority = kwargs.get('priority', None) self.credential_passthrough = kwargs.get('credential_passthrough', None) self.identity = kwargs.get('identity', None) self.environment = kwargs.get('environment', None) self.history = kwargs.get('history', None) self.spark = kwargs.get('spark', None) self.parallel_task = kwargs.get('parallel_task', None) self.tensorflow = kwargs.get('tensorflow', None) self.mpi = kwargs.get('mpi', None) self.py_torch = kwargs.get('py_torch', None) self.ray = kwargs.get('ray', None) self.hdi = kwargs.get('hdi', None) self.docker = kwargs.get('docker', None) self.command_return_code_config = kwargs.get('command_return_code_config', None) self.environment_variables = kwargs.get('environment_variables', None) self.application_endpoints = kwargs.get('application_endpoints', None) self.parameters = 
kwargs.get('parameters', None) self.autologger_settings = kwargs.get('autologger_settings', None) self.data_bricks = kwargs.get('data_bricks', None) self.training_diagnostic_config = kwargs.get('training_diagnostic_config', None) self.secrets_configuration = kwargs.get('secrets_configuration', None) class RunDatasetReference(msrest.serialization.Model): """RunDatasetReference. :ivar id: :vartype id: str :ivar name: :vartype name: str :ivar version: :vartype version: str """ _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword id: :paramtype id: str :keyword name: :paramtype name: str :keyword version: :paramtype version: str """ super(RunDatasetReference, self).__init__(**kwargs) self.id = kwargs.get('id', None) self.name = kwargs.get('name', None) self.version = kwargs.get('version', None) class RunDefinition(msrest.serialization.Model): """RunDefinition. :ivar configuration: :vartype configuration: ~flow.models.RunConfiguration :ivar snapshot_id: :vartype snapshot_id: str :ivar snapshots: :vartype snapshots: list[~flow.models.Snapshot] :ivar parent_run_id: :vartype parent_run_id: str :ivar run_type: :vartype run_type: str :ivar display_name: :vartype display_name: str :ivar environment_asset_id: :vartype environment_asset_id: str :ivar primary_metric_name: :vartype primary_metric_name: str :ivar description: :vartype description: str :ivar cancel_reason: :vartype cancel_reason: str :ivar properties: Dictionary of :code:`<string>`. :vartype properties: dict[str, str] :ivar tags: A set of tags. Dictionary of :code:`<string>`. 
:vartype tags: dict[str, str] """ _attribute_map = { 'configuration': {'key': 'configuration', 'type': 'RunConfiguration'}, 'snapshot_id': {'key': 'snapshotId', 'type': 'str'}, 'snapshots': {'key': 'snapshots', 'type': '[Snapshot]'}, 'parent_run_id': {'key': 'parentRunId', 'type': 'str'}, 'run_type': {'key': 'runType', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'}, 'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'cancel_reason': {'key': 'cancelReason', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'tags': {'key': 'tags', 'type': '{str}'}, } def __init__( self, **kwargs ): """ :keyword configuration: :paramtype configuration: ~flow.models.RunConfiguration :keyword snapshot_id: :paramtype snapshot_id: str :keyword snapshots: :paramtype snapshots: list[~flow.models.Snapshot] :keyword parent_run_id: :paramtype parent_run_id: str :keyword run_type: :paramtype run_type: str :keyword display_name: :paramtype display_name: str :keyword environment_asset_id: :paramtype environment_asset_id: str :keyword primary_metric_name: :paramtype primary_metric_name: str :keyword description: :paramtype description: str :keyword cancel_reason: :paramtype cancel_reason: str :keyword properties: Dictionary of :code:`<string>`. :paramtype properties: dict[str, str] :keyword tags: A set of tags. Dictionary of :code:`<string>`. 
:paramtype tags: dict[str, str] """ super(RunDefinition, self).__init__(**kwargs) self.configuration = kwargs.get('configuration', None) self.snapshot_id = kwargs.get('snapshot_id', None) self.snapshots = kwargs.get('snapshots', None) self.parent_run_id = kwargs.get('parent_run_id', None) self.run_type = kwargs.get('run_type', None) self.display_name = kwargs.get('display_name', None) self.environment_asset_id = kwargs.get('environment_asset_id', None) self.primary_metric_name = kwargs.get('primary_metric_name', None) self.description = kwargs.get('description', None) self.cancel_reason = kwargs.get('cancel_reason', None) self.properties = kwargs.get('properties', None) self.tags = kwargs.get('tags', None) class RunDetailsDto(msrest.serialization.Model): """RunDetailsDto. :ivar run_id: :vartype run_id: str :ivar run_uuid: :vartype run_uuid: str :ivar parent_run_uuid: :vartype parent_run_uuid: str :ivar root_run_uuid: :vartype root_run_uuid: str :ivar target: :vartype target: str :ivar status: :vartype status: str :ivar parent_run_id: :vartype parent_run_id: str :ivar data_container_id: :vartype data_container_id: str :ivar created_time_utc: :vartype created_time_utc: ~datetime.datetime :ivar start_time_utc: :vartype start_time_utc: ~datetime.datetime :ivar end_time_utc: :vartype end_time_utc: ~datetime.datetime :ivar error: The error response. :vartype error: ~flow.models.ErrorResponse :ivar warnings: :vartype warnings: list[~flow.models.RunDetailsWarningDto] :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar properties: This is a dictionary. :vartype properties: dict[str, str] :ivar parameters: Dictionary of :code:`<any>`. :vartype parameters: dict[str, any] :ivar services: This is a dictionary. 
:vartype services: dict[str, ~flow.models.EndpointSetting] :ivar input_datasets: :vartype input_datasets: list[~flow.models.DatasetLineage] :ivar output_datasets: :vartype output_datasets: list[~flow.models.OutputDatasetLineage] :ivar run_definition: Anything. :vartype run_definition: any :ivar log_files: This is a dictionary. :vartype log_files: dict[str, str] :ivar job_cost: :vartype job_cost: ~flow.models.JobCost :ivar revision: :vartype revision: long :ivar run_type_v2: :vartype run_type_v2: ~flow.models.RunTypeV2 :ivar settings: This is a dictionary. :vartype settings: dict[str, str] :ivar compute_request: :vartype compute_request: ~flow.models.ComputeRequest :ivar compute: :vartype compute: ~flow.models.Compute :ivar created_by: :vartype created_by: ~flow.models.User :ivar compute_duration: :vartype compute_duration: str :ivar effective_start_time_utc: :vartype effective_start_time_utc: ~datetime.datetime :ivar run_number: :vartype run_number: int :ivar root_run_id: :vartype root_run_id: str :ivar experiment_id: :vartype experiment_id: str :ivar user_id: :vartype user_id: str :ivar status_revision: :vartype status_revision: long :ivar current_compute_time: :vartype current_compute_time: str :ivar last_start_time_utc: :vartype last_start_time_utc: ~datetime.datetime :ivar last_modified_by: :vartype last_modified_by: ~flow.models.User :ivar last_modified_utc: :vartype last_modified_utc: ~datetime.datetime :ivar duration: :vartype duration: str :ivar inputs: Dictionary of :code:`<TypedAssetReference>`. :vartype inputs: dict[str, ~flow.models.TypedAssetReference] :ivar outputs: Dictionary of :code:`<TypedAssetReference>`. 
:vartype outputs: dict[str, ~flow.models.TypedAssetReference] :ivar current_attempt_id: :vartype current_attempt_id: int """ _validation = { 'input_datasets': {'unique': True}, 'output_datasets': {'unique': True}, } _attribute_map = { 'run_id': {'key': 'runId', 'type': 'str'}, 'run_uuid': {'key': 'runUuid', 'type': 'str'}, 'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'}, 'root_run_uuid': {'key': 'rootRunUuid', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'parent_run_id': {'key': 'parentRunId', 'type': 'str'}, 'data_container_id': {'key': 'dataContainerId', 'type': 'str'}, 'created_time_utc': {'key': 'createdTimeUtc', 'type': 'iso-8601'}, 'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'}, 'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'}, 'error': {'key': 'error', 'type': 'ErrorResponse'}, 'warnings': {'key': 'warnings', 'type': '[RunDetailsWarningDto]'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'parameters': {'key': 'parameters', 'type': '{object}'}, 'services': {'key': 'services', 'type': '{EndpointSetting}'}, 'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'}, 'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'}, 'run_definition': {'key': 'runDefinition', 'type': 'object'}, 'log_files': {'key': 'logFiles', 'type': '{str}'}, 'job_cost': {'key': 'jobCost', 'type': 'JobCost'}, 'revision': {'key': 'revision', 'type': 'long'}, 'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'}, 'settings': {'key': 'settings', 'type': '{str}'}, 'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'}, 'compute': {'key': 'compute', 'type': 'Compute'}, 'created_by': {'key': 'createdBy', 'type': 'User'}, 'compute_duration': {'key': 'computeDuration', 'type': 'str'}, 'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'}, 'run_number': {'key': 'runNumber', 
'type': 'int'}, 'root_run_id': {'key': 'rootRunId', 'type': 'str'}, 'experiment_id': {'key': 'experimentId', 'type': 'str'}, 'user_id': {'key': 'userId', 'type': 'str'}, 'status_revision': {'key': 'statusRevision', 'type': 'long'}, 'current_compute_time': {'key': 'currentComputeTime', 'type': 'str'}, 'last_start_time_utc': {'key': 'lastStartTimeUtc', 'type': 'iso-8601'}, 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'User'}, 'last_modified_utc': {'key': 'lastModifiedUtc', 'type': 'iso-8601'}, 'duration': {'key': 'duration', 'type': 'str'}, 'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'}, 'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'}, 'current_attempt_id': {'key': 'currentAttemptId', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword run_id: :paramtype run_id: str :keyword run_uuid: :paramtype run_uuid: str :keyword parent_run_uuid: :paramtype parent_run_uuid: str :keyword root_run_uuid: :paramtype root_run_uuid: str :keyword target: :paramtype target: str :keyword status: :paramtype status: str :keyword parent_run_id: :paramtype parent_run_id: str :keyword data_container_id: :paramtype data_container_id: str :keyword created_time_utc: :paramtype created_time_utc: ~datetime.datetime :keyword start_time_utc: :paramtype start_time_utc: ~datetime.datetime :keyword end_time_utc: :paramtype end_time_utc: ~datetime.datetime :keyword error: The error response. :paramtype error: ~flow.models.ErrorResponse :keyword warnings: :paramtype warnings: list[~flow.models.RunDetailsWarningDto] :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] :keyword parameters: Dictionary of :code:`<any>`. :paramtype parameters: dict[str, any] :keyword services: This is a dictionary. 
:paramtype services: dict[str, ~flow.models.EndpointSetting] :keyword input_datasets: :paramtype input_datasets: list[~flow.models.DatasetLineage] :keyword output_datasets: :paramtype output_datasets: list[~flow.models.OutputDatasetLineage] :keyword run_definition: Anything. :paramtype run_definition: any :keyword log_files: This is a dictionary. :paramtype log_files: dict[str, str] :keyword job_cost: :paramtype job_cost: ~flow.models.JobCost :keyword revision: :paramtype revision: long :keyword run_type_v2: :paramtype run_type_v2: ~flow.models.RunTypeV2 :keyword settings: This is a dictionary. :paramtype settings: dict[str, str] :keyword compute_request: :paramtype compute_request: ~flow.models.ComputeRequest :keyword compute: :paramtype compute: ~flow.models.Compute :keyword created_by: :paramtype created_by: ~flow.models.User :keyword compute_duration: :paramtype compute_duration: str :keyword effective_start_time_utc: :paramtype effective_start_time_utc: ~datetime.datetime :keyword run_number: :paramtype run_number: int :keyword root_run_id: :paramtype root_run_id: str :keyword experiment_id: :paramtype experiment_id: str :keyword user_id: :paramtype user_id: str :keyword status_revision: :paramtype status_revision: long :keyword current_compute_time: :paramtype current_compute_time: str :keyword last_start_time_utc: :paramtype last_start_time_utc: ~datetime.datetime :keyword last_modified_by: :paramtype last_modified_by: ~flow.models.User :keyword last_modified_utc: :paramtype last_modified_utc: ~datetime.datetime :keyword duration: :paramtype duration: str :keyword inputs: Dictionary of :code:`<TypedAssetReference>`. :paramtype inputs: dict[str, ~flow.models.TypedAssetReference] :keyword outputs: Dictionary of :code:`<TypedAssetReference>`. 
:paramtype outputs: dict[str, ~flow.models.TypedAssetReference] :keyword current_attempt_id: :paramtype current_attempt_id: int """ super(RunDetailsDto, self).__init__(**kwargs) self.run_id = kwargs.get('run_id', None) self.run_uuid = kwargs.get('run_uuid', None) self.parent_run_uuid = kwargs.get('parent_run_uuid', None) self.root_run_uuid = kwargs.get('root_run_uuid', None) self.target = kwargs.get('target', None) self.status = kwargs.get('status', None) self.parent_run_id = kwargs.get('parent_run_id', None) self.data_container_id = kwargs.get('data_container_id', None) self.created_time_utc = kwargs.get('created_time_utc', None) self.start_time_utc = kwargs.get('start_time_utc', None) self.end_time_utc = kwargs.get('end_time_utc', None) self.error = kwargs.get('error', None) self.warnings = kwargs.get('warnings', None) self.tags = kwargs.get('tags', None) self.properties = kwargs.get('properties', None) self.parameters = kwargs.get('parameters', None) self.services = kwargs.get('services', None) self.input_datasets = kwargs.get('input_datasets', None) self.output_datasets = kwargs.get('output_datasets', None) self.run_definition = kwargs.get('run_definition', None) self.log_files = kwargs.get('log_files', None) self.job_cost = kwargs.get('job_cost', None) self.revision = kwargs.get('revision', None) self.run_type_v2 = kwargs.get('run_type_v2', None) self.settings = kwargs.get('settings', None) self.compute_request = kwargs.get('compute_request', None) self.compute = kwargs.get('compute', None) self.created_by = kwargs.get('created_by', None) self.compute_duration = kwargs.get('compute_duration', None) self.effective_start_time_utc = kwargs.get('effective_start_time_utc', None) self.run_number = kwargs.get('run_number', None) self.root_run_id = kwargs.get('root_run_id', None) self.experiment_id = kwargs.get('experiment_id', None) self.user_id = kwargs.get('user_id', None) self.status_revision = kwargs.get('status_revision', None) self.current_compute_time = 
kwargs.get('current_compute_time', None) self.last_start_time_utc = kwargs.get('last_start_time_utc', None) self.last_modified_by = kwargs.get('last_modified_by', None) self.last_modified_utc = kwargs.get('last_modified_utc', None) self.duration = kwargs.get('duration', None) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.current_attempt_id = kwargs.get('current_attempt_id', None) class RunDetailsWarningDto(msrest.serialization.Model): """RunDetailsWarningDto. :ivar source: :vartype source: str :ivar message: :vartype message: str """ _attribute_map = { 'source': {'key': 'source', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword source: :paramtype source: str :keyword message: :paramtype message: str """ super(RunDetailsWarningDto, self).__init__(**kwargs) self.source = kwargs.get('source', None) self.message = kwargs.get('message', None) class RunDto(msrest.serialization.Model): """RunDto. :ivar run_number: :vartype run_number: int :ivar root_run_id: :vartype root_run_id: str :ivar created_utc: :vartype created_utc: ~datetime.datetime :ivar created_by: :vartype created_by: ~flow.models.User :ivar user_id: :vartype user_id: str :ivar token: :vartype token: str :ivar token_expiry_time_utc: :vartype token_expiry_time_utc: ~datetime.datetime :ivar error: The error response. 
:vartype error: ~flow.models.ErrorResponse :ivar warnings: :vartype warnings: list[~flow.models.RunDetailsWarningDto] :ivar revision: :vartype revision: long :ivar status_revision: :vartype status_revision: long :ivar run_uuid: :vartype run_uuid: str :ivar parent_run_uuid: :vartype parent_run_uuid: str :ivar root_run_uuid: :vartype root_run_uuid: str :ivar last_start_time_utc: :vartype last_start_time_utc: ~datetime.datetime :ivar current_compute_time: :vartype current_compute_time: str :ivar compute_duration: :vartype compute_duration: str :ivar effective_start_time_utc: :vartype effective_start_time_utc: ~datetime.datetime :ivar last_modified_by: :vartype last_modified_by: ~flow.models.User :ivar last_modified_utc: :vartype last_modified_utc: ~datetime.datetime :ivar duration: :vartype duration: str :ivar cancelation_reason: :vartype cancelation_reason: str :ivar current_attempt_id: :vartype current_attempt_id: int :ivar run_id: :vartype run_id: str :ivar parent_run_id: :vartype parent_run_id: str :ivar experiment_id: :vartype experiment_id: str :ivar status: :vartype status: str :ivar start_time_utc: :vartype start_time_utc: ~datetime.datetime :ivar end_time_utc: :vartype end_time_utc: ~datetime.datetime :ivar schedule_id: :vartype schedule_id: str :ivar display_name: :vartype display_name: str :ivar name: :vartype name: str :ivar data_container_id: :vartype data_container_id: str :ivar description: :vartype description: str :ivar hidden: :vartype hidden: bool :ivar run_type: :vartype run_type: str :ivar run_type_v2: :vartype run_type_v2: ~flow.models.RunTypeV2 :ivar properties: This is a dictionary. :vartype properties: dict[str, str] :ivar parameters: Dictionary of :code:`<any>`. :vartype parameters: dict[str, any] :ivar action_uris: Dictionary of :code:`<string>`. 
:vartype action_uris: dict[str, str] :ivar script_name: :vartype script_name: str :ivar target: :vartype target: str :ivar unique_child_run_compute_targets: :vartype unique_child_run_compute_targets: list[str] :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar settings: Dictionary of :code:`<string>`. :vartype settings: dict[str, str] :ivar services: Dictionary of :code:`<EndpointSetting>`. :vartype services: dict[str, ~flow.models.EndpointSetting] :ivar input_datasets: :vartype input_datasets: list[~flow.models.DatasetLineage] :ivar output_datasets: :vartype output_datasets: list[~flow.models.OutputDatasetLineage] :ivar run_definition: Anything. :vartype run_definition: any :ivar job_specification: Anything. :vartype job_specification: any :ivar primary_metric_name: :vartype primary_metric_name: str :ivar created_from: :vartype created_from: ~flow.models.CreatedFromDto :ivar cancel_uri: :vartype cancel_uri: str :ivar complete_uri: :vartype complete_uri: str :ivar diagnostics_uri: :vartype diagnostics_uri: str :ivar compute_request: :vartype compute_request: ~flow.models.ComputeRequest :ivar compute: :vartype compute: ~flow.models.Compute :ivar retain_for_lifetime_of_workspace: :vartype retain_for_lifetime_of_workspace: bool :ivar queueing_info: :vartype queueing_info: ~flow.models.QueueingInfo :ivar inputs: Dictionary of :code:`<TypedAssetReference>`. :vartype inputs: dict[str, ~flow.models.TypedAssetReference] :ivar outputs: Dictionary of :code:`<TypedAssetReference>`. 
:vartype outputs: dict[str, ~flow.models.TypedAssetReference] """ _validation = { 'unique_child_run_compute_targets': {'unique': True}, 'input_datasets': {'unique': True}, 'output_datasets': {'unique': True}, } _attribute_map = { 'run_number': {'key': 'runNumber', 'type': 'int'}, 'root_run_id': {'key': 'rootRunId', 'type': 'str'}, 'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'}, 'created_by': {'key': 'createdBy', 'type': 'User'}, 'user_id': {'key': 'userId', 'type': 'str'}, 'token': {'key': 'token', 'type': 'str'}, 'token_expiry_time_utc': {'key': 'tokenExpiryTimeUtc', 'type': 'iso-8601'}, 'error': {'key': 'error', 'type': 'ErrorResponse'}, 'warnings': {'key': 'warnings', 'type': '[RunDetailsWarningDto]'}, 'revision': {'key': 'revision', 'type': 'long'}, 'status_revision': {'key': 'statusRevision', 'type': 'long'}, 'run_uuid': {'key': 'runUuid', 'type': 'str'}, 'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'}, 'root_run_uuid': {'key': 'rootRunUuid', 'type': 'str'}, 'last_start_time_utc': {'key': 'lastStartTimeUtc', 'type': 'iso-8601'}, 'current_compute_time': {'key': 'currentComputeTime', 'type': 'str'}, 'compute_duration': {'key': 'computeDuration', 'type': 'str'}, 'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'}, 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'User'}, 'last_modified_utc': {'key': 'lastModifiedUtc', 'type': 'iso-8601'}, 'duration': {'key': 'duration', 'type': 'str'}, 'cancelation_reason': {'key': 'cancelationReason', 'type': 'str'}, 'current_attempt_id': {'key': 'currentAttemptId', 'type': 'int'}, 'run_id': {'key': 'runId', 'type': 'str'}, 'parent_run_id': {'key': 'parentRunId', 'type': 'str'}, 'experiment_id': {'key': 'experimentId', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'}, 'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'}, 'schedule_id': {'key': 'scheduleId', 'type': 'str'}, 'display_name': {'key': 
'displayName', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'data_container_id': {'key': 'dataContainerId', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'hidden': {'key': 'hidden', 'type': 'bool'}, 'run_type': {'key': 'runType', 'type': 'str'}, 'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'parameters': {'key': 'parameters', 'type': '{object}'}, 'action_uris': {'key': 'actionUris', 'type': '{str}'}, 'script_name': {'key': 'scriptName', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, 'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'settings': {'key': 'settings', 'type': '{str}'}, 'services': {'key': 'services', 'type': '{EndpointSetting}'}, 'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'}, 'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'}, 'run_definition': {'key': 'runDefinition', 'type': 'object'}, 'job_specification': {'key': 'jobSpecification', 'type': 'object'}, 'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'}, 'created_from': {'key': 'createdFrom', 'type': 'CreatedFromDto'}, 'cancel_uri': {'key': 'cancelUri', 'type': 'str'}, 'complete_uri': {'key': 'completeUri', 'type': 'str'}, 'diagnostics_uri': {'key': 'diagnosticsUri', 'type': 'str'}, 'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'}, 'compute': {'key': 'compute', 'type': 'Compute'}, 'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'}, 'queueing_info': {'key': 'queueingInfo', 'type': 'QueueingInfo'}, 'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'}, 'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'}, } def __init__( self, **kwargs ): """ :keyword run_number: :paramtype run_number: int :keyword root_run_id: :paramtype root_run_id: str :keyword created_utc: 
:paramtype created_utc: ~datetime.datetime :keyword created_by: :paramtype created_by: ~flow.models.User :keyword user_id: :paramtype user_id: str :keyword token: :paramtype token: str :keyword token_expiry_time_utc: :paramtype token_expiry_time_utc: ~datetime.datetime :keyword error: The error response. :paramtype error: ~flow.models.ErrorResponse :keyword warnings: :paramtype warnings: list[~flow.models.RunDetailsWarningDto] :keyword revision: :paramtype revision: long :keyword status_revision: :paramtype status_revision: long :keyword run_uuid: :paramtype run_uuid: str :keyword parent_run_uuid: :paramtype parent_run_uuid: str :keyword root_run_uuid: :paramtype root_run_uuid: str :keyword last_start_time_utc: :paramtype last_start_time_utc: ~datetime.datetime :keyword current_compute_time: :paramtype current_compute_time: str :keyword compute_duration: :paramtype compute_duration: str :keyword effective_start_time_utc: :paramtype effective_start_time_utc: ~datetime.datetime :keyword last_modified_by: :paramtype last_modified_by: ~flow.models.User :keyword last_modified_utc: :paramtype last_modified_utc: ~datetime.datetime :keyword duration: :paramtype duration: str :keyword cancelation_reason: :paramtype cancelation_reason: str :keyword current_attempt_id: :paramtype current_attempt_id: int :keyword run_id: :paramtype run_id: str :keyword parent_run_id: :paramtype parent_run_id: str :keyword experiment_id: :paramtype experiment_id: str :keyword status: :paramtype status: str :keyword start_time_utc: :paramtype start_time_utc: ~datetime.datetime :keyword end_time_utc: :paramtype end_time_utc: ~datetime.datetime :keyword schedule_id: :paramtype schedule_id: str :keyword display_name: :paramtype display_name: str :keyword name: :paramtype name: str :keyword data_container_id: :paramtype data_container_id: str :keyword description: :paramtype description: str :keyword hidden: :paramtype hidden: bool :keyword run_type: :paramtype run_type: str :keyword run_type_v2: 
:paramtype run_type_v2: ~flow.models.RunTypeV2 :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] :keyword parameters: Dictionary of :code:`<any>`. :paramtype parameters: dict[str, any] :keyword action_uris: Dictionary of :code:`<string>`. :paramtype action_uris: dict[str, str] :keyword script_name: :paramtype script_name: str :keyword target: :paramtype target: str :keyword unique_child_run_compute_targets: :paramtype unique_child_run_compute_targets: list[str] :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword settings: Dictionary of :code:`<string>`. :paramtype settings: dict[str, str] :keyword services: Dictionary of :code:`<EndpointSetting>`. :paramtype services: dict[str, ~flow.models.EndpointSetting] :keyword input_datasets: :paramtype input_datasets: list[~flow.models.DatasetLineage] :keyword output_datasets: :paramtype output_datasets: list[~flow.models.OutputDatasetLineage] :keyword run_definition: Anything. :paramtype run_definition: any :keyword job_specification: Anything. :paramtype job_specification: any :keyword primary_metric_name: :paramtype primary_metric_name: str :keyword created_from: :paramtype created_from: ~flow.models.CreatedFromDto :keyword cancel_uri: :paramtype cancel_uri: str :keyword complete_uri: :paramtype complete_uri: str :keyword diagnostics_uri: :paramtype diagnostics_uri: str :keyword compute_request: :paramtype compute_request: ~flow.models.ComputeRequest :keyword compute: :paramtype compute: ~flow.models.Compute :keyword retain_for_lifetime_of_workspace: :paramtype retain_for_lifetime_of_workspace: bool :keyword queueing_info: :paramtype queueing_info: ~flow.models.QueueingInfo :keyword inputs: Dictionary of :code:`<TypedAssetReference>`. :paramtype inputs: dict[str, ~flow.models.TypedAssetReference] :keyword outputs: Dictionary of :code:`<TypedAssetReference>`. 
:paramtype outputs: dict[str, ~flow.models.TypedAssetReference] """ super(RunDto, self).__init__(**kwargs) self.run_number = kwargs.get('run_number', None) self.root_run_id = kwargs.get('root_run_id', None) self.created_utc = kwargs.get('created_utc', None) self.created_by = kwargs.get('created_by', None) self.user_id = kwargs.get('user_id', None) self.token = kwargs.get('token', None) self.token_expiry_time_utc = kwargs.get('token_expiry_time_utc', None) self.error = kwargs.get('error', None) self.warnings = kwargs.get('warnings', None) self.revision = kwargs.get('revision', None) self.status_revision = kwargs.get('status_revision', None) self.run_uuid = kwargs.get('run_uuid', None) self.parent_run_uuid = kwargs.get('parent_run_uuid', None) self.root_run_uuid = kwargs.get('root_run_uuid', None) self.last_start_time_utc = kwargs.get('last_start_time_utc', None) self.current_compute_time = kwargs.get('current_compute_time', None) self.compute_duration = kwargs.get('compute_duration', None) self.effective_start_time_utc = kwargs.get('effective_start_time_utc', None) self.last_modified_by = kwargs.get('last_modified_by', None) self.last_modified_utc = kwargs.get('last_modified_utc', None) self.duration = kwargs.get('duration', None) self.cancelation_reason = kwargs.get('cancelation_reason', None) self.current_attempt_id = kwargs.get('current_attempt_id', None) self.run_id = kwargs.get('run_id', None) self.parent_run_id = kwargs.get('parent_run_id', None) self.experiment_id = kwargs.get('experiment_id', None) self.status = kwargs.get('status', None) self.start_time_utc = kwargs.get('start_time_utc', None) self.end_time_utc = kwargs.get('end_time_utc', None) self.schedule_id = kwargs.get('schedule_id', None) self.display_name = kwargs.get('display_name', None) self.name = kwargs.get('name', None) self.data_container_id = kwargs.get('data_container_id', None) self.description = kwargs.get('description', None) self.hidden = kwargs.get('hidden', None) self.run_type = 
kwargs.get('run_type', None) self.run_type_v2 = kwargs.get('run_type_v2', None) self.properties = kwargs.get('properties', None) self.parameters = kwargs.get('parameters', None) self.action_uris = kwargs.get('action_uris', None) self.script_name = kwargs.get('script_name', None) self.target = kwargs.get('target', None) self.unique_child_run_compute_targets = kwargs.get('unique_child_run_compute_targets', None) self.tags = kwargs.get('tags', None) self.settings = kwargs.get('settings', None) self.services = kwargs.get('services', None) self.input_datasets = kwargs.get('input_datasets', None) self.output_datasets = kwargs.get('output_datasets', None) self.run_definition = kwargs.get('run_definition', None) self.job_specification = kwargs.get('job_specification', None) self.primary_metric_name = kwargs.get('primary_metric_name', None) self.created_from = kwargs.get('created_from', None) self.cancel_uri = kwargs.get('cancel_uri', None) self.complete_uri = kwargs.get('complete_uri', None) self.diagnostics_uri = kwargs.get('diagnostics_uri', None) self.compute_request = kwargs.get('compute_request', None) self.compute = kwargs.get('compute', None) self.retain_for_lifetime_of_workspace = kwargs.get('retain_for_lifetime_of_workspace', None) self.queueing_info = kwargs.get('queueing_info', None) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) class RunIndexEntity(msrest.serialization.Model): """RunIndexEntity. Variables are only populated by the server, and will be ignored when sending a request. :ivar schema_id: :vartype schema_id: str :ivar entity_id: :vartype entity_id: str :ivar kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned". :vartype kind: str or ~flow.models.EntityKind :ivar annotations: :vartype annotations: ~flow.models.RunAnnotations :ivar properties: :vartype properties: ~flow.models.RunProperties :ivar internal: Any object. 
    :vartype internal: any
    :ivar update_sequence:
    :vartype update_sequence: long
    :ivar type:
    :vartype type: str
    :ivar version:
    :vartype version: str
    :ivar entity_container_id:
    :vartype entity_container_id: str
    :ivar entity_object_id:
    :vartype entity_object_id: str
    :ivar resource_type:
    :vartype resource_type: str
    :ivar relationships:
    :vartype relationships: list[~flow.models.Relationship]
    :ivar asset_id:
    :vartype asset_id: str
    """

    # Fields the service populates; msrest rejects them if supplied as input.
    _validation = {
        'version': {'readonly': True},
        'entity_container_id': {'readonly': True},
        'entity_object_id': {'readonly': True},
        'resource_type': {'readonly': True},
    }

    # Maps Python attribute name -> REST wire name and msrest type string.
    _attribute_map = {
        'schema_id': {'key': 'schemaId', 'type': 'str'},
        'entity_id': {'key': 'entityId', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'annotations': {'key': 'annotations', 'type': 'RunAnnotations'},
        'properties': {'key': 'properties', 'type': 'RunProperties'},
        'internal': {'key': 'internal', 'type': 'object'},
        'update_sequence': {'key': 'updateSequence', 'type': 'long'},
        'type': {'key': 'type', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'entity_container_id': {'key': 'entityContainerId', 'type': 'str'},
        'entity_object_id': {'key': 'entityObjectId', 'type': 'str'},
        'resource_type': {'key': 'resourceType', 'type': 'str'},
        'relationships': {'key': 'relationships', 'type': '[Relationship]'},
        'asset_id': {'key': 'assetId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword schema_id:
        :paramtype schema_id: str
        :keyword entity_id:
        :paramtype entity_id: str
        :keyword kind: Possible values include: "Invalid", "LineageRoot", "Versioned",
         "Unversioned".
        :paramtype kind: str or ~flow.models.EntityKind
        :keyword annotations:
        :paramtype annotations: ~flow.models.RunAnnotations
        :keyword properties:
        :paramtype properties: ~flow.models.RunProperties
        :keyword internal: Any object.
        :paramtype internal: any
        :keyword update_sequence:
        :paramtype update_sequence: long
        :keyword type:
        :paramtype type: str
        :keyword relationships:
        :paramtype relationships: list[~flow.models.Relationship]
        :keyword asset_id:
        :paramtype asset_id: str
        """
        super(RunIndexEntity, self).__init__(**kwargs)
        self.schema_id = kwargs.get('schema_id', None)
        self.entity_id = kwargs.get('entity_id', None)
        self.kind = kwargs.get('kind', None)
        self.annotations = kwargs.get('annotations', None)
        self.properties = kwargs.get('properties', None)
        self.internal = kwargs.get('internal', None)
        self.update_sequence = kwargs.get('update_sequence', None)
        self.type = kwargs.get('type', None)
        # Read-only fields always start as None; the service fills them in
        # responses (see _validation above), so no kwargs lookup is done.
        self.version = None
        self.entity_container_id = None
        self.entity_object_id = None
        self.resource_type = None
        self.relationships = kwargs.get('relationships', None)
        self.asset_id = kwargs.get('asset_id', None)


class RunIndexMetricSummary(msrest.serialization.Model):
    """RunIndexMetricSummary.

    :ivar count:
    :vartype count: long
    :ivar last_value: Anything.
    :vartype last_value: any
    :ivar minimum_value: Anything.
    :vartype minimum_value: any
    :ivar maximum_value: Anything.
    :vartype maximum_value: any
    :ivar metric_type:
    :vartype metric_type: str
    """

    # Maps Python attribute name -> REST wire name and msrest type string.
    _attribute_map = {
        'count': {'key': 'count', 'type': 'long'},
        'last_value': {'key': 'lastValue', 'type': 'object'},
        'minimum_value': {'key': 'minimumValue', 'type': 'object'},
        'maximum_value': {'key': 'maximumValue', 'type': 'object'},
        'metric_type': {'key': 'metricType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword count:
        :paramtype count: long
        :keyword last_value: Anything.
        :paramtype last_value: any
        :keyword minimum_value: Anything.
        :paramtype minimum_value: any
        :keyword maximum_value: Anything.
        :paramtype maximum_value: any
        :keyword metric_type:
        :paramtype metric_type: str
        """
        super(RunIndexMetricSummary, self).__init__(**kwargs)
        self.count = kwargs.get('count', None)
        self.last_value = kwargs.get('last_value', None)
        self.minimum_value = kwargs.get('minimum_value', None)
        self.maximum_value = kwargs.get('maximum_value', None)
        self.metric_type = kwargs.get('metric_type', None)


class RunIndexMetricSummarySystemObject(msrest.serialization.Model):
    """RunIndexMetricSummarySystemObject.

    NOTE(review): field-for-field identical to RunIndexMetricSummary; the
    swagger presumably declares it as a distinct schema, so both generated
    types are kept.

    :ivar count:
    :vartype count: long
    :ivar last_value: Anything.
    :vartype last_value: any
    :ivar minimum_value: Anything.
    :vartype minimum_value: any
    :ivar maximum_value: Anything.
    :vartype maximum_value: any
    :ivar metric_type:
    :vartype metric_type: str
    """

    # Maps Python attribute name -> REST wire name and msrest type string.
    _attribute_map = {
        'count': {'key': 'count', 'type': 'long'},
        'last_value': {'key': 'lastValue', 'type': 'object'},
        'minimum_value': {'key': 'minimumValue', 'type': 'object'},
        'maximum_value': {'key': 'maximumValue', 'type': 'object'},
        'metric_type': {'key': 'metricType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword count:
        :paramtype count: long
        :keyword last_value: Anything.
        :paramtype last_value: any
        :keyword minimum_value: Anything.
        :paramtype minimum_value: any
        :keyword maximum_value: Anything.
        :paramtype maximum_value: any
        :keyword metric_type:
        :paramtype metric_type: str
        """
        super(RunIndexMetricSummarySystemObject, self).__init__(**kwargs)
        self.count = kwargs.get('count', None)
        self.last_value = kwargs.get('last_value', None)
        self.minimum_value = kwargs.get('minimum_value', None)
        self.maximum_value = kwargs.get('maximum_value', None)
        self.metric_type = kwargs.get('metric_type', None)


class RunIndexResourceMetricSummary(msrest.serialization.Model):
    """RunIndexResourceMetricSummary.

    :ivar gpu_utilization_percent_last_hour:
    :vartype gpu_utilization_percent_last_hour: float
    :ivar gpu_memory_utilization_percent_last_hour:
    :vartype gpu_memory_utilization_percent_last_hour: float
    :ivar gpu_energy_joules:
    :vartype gpu_energy_joules: float
    :ivar resource_metric_names:
    :vartype resource_metric_names: list[str]
    """

    # Maps Python attribute name -> REST wire name and msrest type string.
    _attribute_map = {
        'gpu_utilization_percent_last_hour': {'key': 'gpuUtilizationPercentLastHour', 'type': 'float'},
        'gpu_memory_utilization_percent_last_hour': {'key': 'gpuMemoryUtilizationPercentLastHour', 'type': 'float'},
        'gpu_energy_joules': {'key': 'gpuEnergyJoules', 'type': 'float'},
        'resource_metric_names': {'key': 'resourceMetricNames', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword gpu_utilization_percent_last_hour:
        :paramtype gpu_utilization_percent_last_hour: float
        :keyword gpu_memory_utilization_percent_last_hour:
        :paramtype gpu_memory_utilization_percent_last_hour: float
        :keyword gpu_energy_joules:
        :paramtype gpu_energy_joules: float
        :keyword resource_metric_names:
        :paramtype resource_metric_names: list[str]
        """
        super(RunIndexResourceMetricSummary, self).__init__(**kwargs)
        self.gpu_utilization_percent_last_hour = kwargs.get('gpu_utilization_percent_last_hour', None)
        self.gpu_memory_utilization_percent_last_hour = kwargs.get('gpu_memory_utilization_percent_last_hour', None)
        self.gpu_energy_joules = kwargs.get('gpu_energy_joules', None)
        self.resource_metric_names = kwargs.get('resource_metric_names', None)


class RunMetricDto(msrest.serialization.Model):
    """RunMetricDto.
    :ivar run_id:
    :vartype run_id: str
    :ivar metric_id:
    :vartype metric_id: str
    :ivar data_container_id:
    :vartype data_container_id: str
    :ivar metric_type:
    :vartype metric_type: str
    :ivar created_utc:
    :vartype created_utc: ~datetime.datetime
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar label:
    :vartype label: str
    :ivar num_cells:
    :vartype num_cells: int
    :ivar data_location:
    :vartype data_location: str
    :ivar cells:
    :vartype cells: list[dict[str, any]]
    :ivar schema:
    :vartype schema: ~flow.models.MetricSchemaDto
    """

    # Maps Python attribute name -> REST wire name and msrest type string.
    # 'iso-8601' deserializes to datetime; '[{object}]' is a list of dicts.
    _attribute_map = {
        'run_id': {'key': 'runId', 'type': 'str'},
        'metric_id': {'key': 'metricId', 'type': 'str'},
        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
        'metric_type': {'key': 'metricType', 'type': 'str'},
        'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'num_cells': {'key': 'numCells', 'type': 'int'},
        'data_location': {'key': 'dataLocation', 'type': 'str'},
        'cells': {'key': 'cells', 'type': '[{object}]'},
        'schema': {'key': 'schema', 'type': 'MetricSchemaDto'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword run_id:
        :paramtype run_id: str
        :keyword metric_id:
        :paramtype metric_id: str
        :keyword data_container_id:
        :paramtype data_container_id: str
        :keyword metric_type:
        :paramtype metric_type: str
        :keyword created_utc:
        :paramtype created_utc: ~datetime.datetime
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword label:
        :paramtype label: str
        :keyword num_cells:
        :paramtype num_cells: int
        :keyword data_location:
        :paramtype data_location: str
        :keyword cells:
        :paramtype cells: list[dict[str, any]]
        :keyword schema:
        :paramtype schema: ~flow.models.MetricSchemaDto
        """
        super(RunMetricDto, self).__init__(**kwargs)
        self.run_id = kwargs.get('run_id', None)
        self.metric_id = kwargs.get('metric_id', None)
        self.data_container_id = kwargs.get('data_container_id', None)
        self.metric_type = kwargs.get('metric_type', None)
        self.created_utc = kwargs.get('created_utc', None)
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.label = kwargs.get('label', None)
        self.num_cells = kwargs.get('num_cells', None)
        self.data_location = kwargs.get('data_location', None)
        self.cells = kwargs.get('cells', None)
        self.schema = kwargs.get('schema', None)


class RunMetricsTypesDto(msrest.serialization.Model):
    """RunMetricsTypesDto.

    :ivar name:
    :vartype name: str
    :ivar type:
    :vartype type: str
    """

    # Maps Python attribute name -> REST wire name and msrest type string.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword type:
        :paramtype type: str
        """
        super(RunMetricsTypesDto, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.type = kwargs.get('type', None)


class RunProperties(msrest.serialization.Model):
    """RunProperties.

    :ivar data_container_id:
    :vartype data_container_id: str
    :ivar target_name:
    :vartype target_name: str
    :ivar run_name:
    :vartype run_name: str
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar run_id:
    :vartype run_id: str
    :ivar parent_run_id:
    :vartype parent_run_id: str
    :ivar root_run_id:
    :vartype root_run_id: str
    :ivar run_type:
    :vartype run_type: str
    :ivar run_type_v2:
    :vartype run_type_v2: ~flow.models.RunTypeV2Index
    :ivar script_name:
    :vartype script_name: str
    :ivar experiment_id:
    :vartype experiment_id: str
    :ivar run_uuid:
    :vartype run_uuid: str
    :ivar parent_run_uuid:
    :vartype parent_run_uuid: str
    :ivar run_number:
    :vartype run_number: int
    :ivar start_time:
    :vartype start_time: ~datetime.datetime
    :ivar end_time:
    :vartype end_time: ~datetime.datetime
    :ivar compute_request:
    :vartype compute_request: ~flow.models.ComputeRequest
    :ivar compute:
    :vartype compute: ~flow.models.Compute
    :ivar user_properties: This is a dictionary.
    :vartype user_properties: dict[str, str]
    :ivar action_uris: This is a dictionary.
    :vartype action_uris: dict[str, str]
    :ivar duration:
    :vartype duration: str
    :ivar duration_milliseconds:
    :vartype duration_milliseconds: float
    :ivar creation_context:
    :vartype creation_context: ~flow.models.CreationContext
    """

    # Maps Python attribute name -> REST wire name and msrest type string.
    # 'iso-8601' deserializes to datetime; '{str}' is a str->str dictionary.
    _attribute_map = {
        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
        'target_name': {'key': 'targetName', 'type': 'str'},
        'run_name': {'key': 'runName', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
        'root_run_id': {'key': 'rootRunId', 'type': 'str'},
        'run_type': {'key': 'runType', 'type': 'str'},
        'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2Index'},
        'script_name': {'key': 'scriptName', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'run_uuid': {'key': 'runUuid', 'type': 'str'},
        'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
        'run_number': {'key': 'runNumber', 'type': 'int'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
        'compute': {'key': 'compute', 'type': 'Compute'},
        'user_properties': {'key': 'userProperties', 'type': '{str}'},
        'action_uris': {'key': 'actionUris', 'type': '{str}'},
        'duration': {'key': 'duration', 'type': 'str'},
        'duration_milliseconds': {'key': 'durationMilliseconds', 'type': 'float'},
        'creation_context': {'key': 'creationContext', 'type': 'CreationContext'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_container_id:
        :paramtype data_container_id: str
        :keyword target_name:
        :paramtype target_name: str
        :keyword run_name:
        :paramtype run_name: str
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword run_id:
        :paramtype run_id: str
        :keyword parent_run_id:
        :paramtype parent_run_id: str
        :keyword root_run_id:
        :paramtype root_run_id: str
        :keyword run_type:
        :paramtype run_type: str
        :keyword run_type_v2:
        :paramtype run_type_v2: ~flow.models.RunTypeV2Index
        :keyword script_name:
        :paramtype script_name: str
        :keyword experiment_id:
        :paramtype experiment_id: str
        :keyword run_uuid:
        :paramtype run_uuid: str
        :keyword parent_run_uuid:
        :paramtype parent_run_uuid: str
        :keyword run_number:
        :paramtype run_number: int
        :keyword start_time:
        :paramtype start_time: ~datetime.datetime
        :keyword end_time:
        :paramtype end_time: ~datetime.datetime
        :keyword compute_request:
        :paramtype compute_request: ~flow.models.ComputeRequest
        :keyword compute:
        :paramtype compute: ~flow.models.Compute
        :keyword user_properties: This is a dictionary.
        :paramtype user_properties: dict[str, str]
        :keyword action_uris: This is a dictionary.
        :paramtype action_uris: dict[str, str]
        :keyword duration:
        :paramtype duration: str
        :keyword duration_milliseconds:
        :paramtype duration_milliseconds: float
        :keyword creation_context:
        :paramtype creation_context: ~flow.models.CreationContext
        """
        super(RunProperties, self).__init__(**kwargs)
        self.data_container_id = kwargs.get('data_container_id', None)
        self.target_name = kwargs.get('target_name', None)
        self.run_name = kwargs.get('run_name', None)
        self.experiment_name = kwargs.get('experiment_name', None)
        self.run_id = kwargs.get('run_id', None)
        self.parent_run_id = kwargs.get('parent_run_id', None)
        self.root_run_id = kwargs.get('root_run_id', None)
        self.run_type = kwargs.get('run_type', None)
        self.run_type_v2 = kwargs.get('run_type_v2', None)
        self.script_name = kwargs.get('script_name', None)
        self.experiment_id = kwargs.get('experiment_id', None)
        self.run_uuid = kwargs.get('run_uuid', None)
        self.parent_run_uuid = kwargs.get('parent_run_uuid', None)
        self.run_number = kwargs.get('run_number', None)
        self.start_time = kwargs.get('start_time', None)
        self.end_time = kwargs.get('end_time', None)
        self.compute_request = kwargs.get('compute_request', None)
        self.compute = kwargs.get('compute', None)
        self.user_properties = kwargs.get('user_properties', None)
        self.action_uris = kwargs.get('action_uris', None)
        self.duration = kwargs.get('duration', None)
        self.duration_milliseconds = kwargs.get('duration_milliseconds', None)
        self.creation_context = kwargs.get('creation_context', None)


class RunSettingParameter(msrest.serialization.Model):
    """RunSettingParameter.

    :ivar name:
    :vartype name: str
    :ivar label:
    :vartype label: str
    :ivar parameter_type: Possible values include: "Undefined", "Int", "Double", "Bool", "String",
     "JsonString", "YamlString", "StringList".
    :vartype parameter_type: str or ~flow.models.RunSettingParameterType
    :ivar is_optional:
    :vartype is_optional: bool
    :ivar default_value:
    :vartype default_value: str
    :ivar lower_bound:
    :vartype lower_bound: str
    :ivar upper_bound:
    :vartype upper_bound: str
    :ivar description:
    :vartype description: str
    :ivar run_setting_ui_hint:
    :vartype run_setting_ui_hint: ~flow.models.RunSettingUIParameterHint
    :ivar argument_name:
    :vartype argument_name: str
    :ivar section_name:
    :vartype section_name: str
    :ivar section_description:
    :vartype section_description: str
    :ivar section_argument_name:
    :vartype section_argument_name: str
    :ivar examples:
    :vartype examples: list[str]
    :ivar enum_values:
    :vartype enum_values: list[str]
    :ivar enum_values_to_argument_strings: This is a dictionary.
    :vartype enum_values_to_argument_strings: dict[str, str]
    :ivar enabled_by_parameter_name:
    :vartype enabled_by_parameter_name: str
    :ivar enabled_by_parameter_values:
    :vartype enabled_by_parameter_values: list[str]
    :ivar disabled_by_parameters:
    :vartype disabled_by_parameters: list[str]
    :ivar module_run_setting_type: Possible values include: "All", "Released", "Default",
     "Testing", "Legacy", "Preview", "UxFull", "Integration", "UxIntegration", "Full".
    :vartype module_run_setting_type: str or ~flow.models.ModuleRunSettingTypes
    :ivar linked_parameter_default_value_mapping: Dictionary of :code:`<string>`.
    :vartype linked_parameter_default_value_mapping: dict[str, str]
    :ivar linked_parameter_key_name:
    :vartype linked_parameter_key_name: str
    :ivar support_link_setting:
    :vartype support_link_setting: bool
    """

    # Maps Python attribute name -> REST wire name and msrest type string.
    # Enum-valued fields ('parameterType', 'moduleRunSettingType') travel as
    # plain strings on the wire.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'parameter_type': {'key': 'parameterType', 'type': 'str'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
        'default_value': {'key': 'defaultValue', 'type': 'str'},
        'lower_bound': {'key': 'lowerBound', 'type': 'str'},
        'upper_bound': {'key': 'upperBound', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'run_setting_ui_hint': {'key': 'runSettingUIHint', 'type': 'RunSettingUIParameterHint'},
        'argument_name': {'key': 'argumentName', 'type': 'str'},
        'section_name': {'key': 'sectionName', 'type': 'str'},
        'section_description': {'key': 'sectionDescription', 'type': 'str'},
        'section_argument_name': {'key': 'sectionArgumentName', 'type': 'str'},
        'examples': {'key': 'examples', 'type': '[str]'},
        'enum_values': {'key': 'enumValues', 'type': '[str]'},
        'enum_values_to_argument_strings': {'key': 'enumValuesToArgumentStrings', 'type': '{str}'},
        'enabled_by_parameter_name': {'key': 'enabledByParameterName', 'type': 'str'},
        'enabled_by_parameter_values': {'key': 'enabledByParameterValues', 'type': '[str]'},
        'disabled_by_parameters': {'key': 'disabledByParameters', 'type': '[str]'},
        'module_run_setting_type': {'key': 'moduleRunSettingType', 'type': 'str'},
        'linked_parameter_default_value_mapping': {'key': 'linkedParameterDefaultValueMapping', 'type': '{str}'},
        'linked_parameter_key_name': {'key': 'linkedParameterKeyName', 'type': 'str'},
        'support_link_setting': {'key': 'supportLinkSetting', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword label:
        :paramtype label: str
        :keyword parameter_type: Possible values include: "Undefined", "Int", "Double", "Bool",
         "String", "JsonString", "YamlString", "StringList".
        :paramtype parameter_type: str or ~flow.models.RunSettingParameterType
        :keyword is_optional:
        :paramtype is_optional: bool
        :keyword default_value:
        :paramtype default_value: str
        :keyword lower_bound:
        :paramtype lower_bound: str
        :keyword upper_bound:
        :paramtype upper_bound: str
        :keyword description:
        :paramtype description: str
        :keyword run_setting_ui_hint:
        :paramtype run_setting_ui_hint: ~flow.models.RunSettingUIParameterHint
        :keyword argument_name:
        :paramtype argument_name: str
        :keyword section_name:
        :paramtype section_name: str
        :keyword section_description:
        :paramtype section_description: str
        :keyword section_argument_name:
        :paramtype section_argument_name: str
        :keyword examples:
        :paramtype examples: list[str]
        :keyword enum_values:
        :paramtype enum_values: list[str]
        :keyword enum_values_to_argument_strings: This is a dictionary.
        :paramtype enum_values_to_argument_strings: dict[str, str]
        :keyword enabled_by_parameter_name:
        :paramtype enabled_by_parameter_name: str
        :keyword enabled_by_parameter_values:
        :paramtype enabled_by_parameter_values: list[str]
        :keyword disabled_by_parameters:
        :paramtype disabled_by_parameters: list[str]
        :keyword module_run_setting_type: Possible values include: "All", "Released", "Default",
         "Testing", "Legacy", "Preview", "UxFull", "Integration", "UxIntegration", "Full".
        :paramtype module_run_setting_type: str or ~flow.models.ModuleRunSettingTypes
        :keyword linked_parameter_default_value_mapping: Dictionary of :code:`<string>`.
        :paramtype linked_parameter_default_value_mapping: dict[str, str]
        :keyword linked_parameter_key_name:
        :paramtype linked_parameter_key_name: str
        :keyword support_link_setting:
        :paramtype support_link_setting: bool
        """
        super(RunSettingParameter, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.label = kwargs.get('label', None)
        self.parameter_type = kwargs.get('parameter_type', None)
        self.is_optional = kwargs.get('is_optional', None)
        self.default_value = kwargs.get('default_value', None)
        self.lower_bound = kwargs.get('lower_bound', None)
        self.upper_bound = kwargs.get('upper_bound', None)
        self.description = kwargs.get('description', None)
        self.run_setting_ui_hint = kwargs.get('run_setting_ui_hint', None)
        self.argument_name = kwargs.get('argument_name', None)
        self.section_name = kwargs.get('section_name', None)
        self.section_description = kwargs.get('section_description', None)
        self.section_argument_name = kwargs.get('section_argument_name', None)
        self.examples = kwargs.get('examples', None)
        self.enum_values = kwargs.get('enum_values', None)
        self.enum_values_to_argument_strings = kwargs.get('enum_values_to_argument_strings', None)
        self.enabled_by_parameter_name = kwargs.get('enabled_by_parameter_name', None)
        self.enabled_by_parameter_values = kwargs.get('enabled_by_parameter_values', None)
        self.disabled_by_parameters = kwargs.get('disabled_by_parameters', None)
        self.module_run_setting_type = kwargs.get('module_run_setting_type', None)
        self.linked_parameter_default_value_mapping = kwargs.get('linked_parameter_default_value_mapping', None)
        self.linked_parameter_key_name = kwargs.get('linked_parameter_key_name', None)
        self.support_link_setting = kwargs.get('support_link_setting', None)


class RunSettingParameterAssignment(msrest.serialization.Model):
    """RunSettingParameterAssignment.
    :ivar use_graph_default_compute:
    :vartype use_graph_default_compute: bool
    :ivar mlc_compute_type:
    :vartype mlc_compute_type: str
    :ivar compute_run_settings:
    :vartype compute_run_settings: list[~flow.models.RunSettingParameterAssignment]
    :ivar linked_parameter_name:
    :vartype linked_parameter_name: str
    :ivar value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
     "Input", "DataPath", "DataSetDefinition".
    :vartype value_type: str or ~flow.models.ParameterValueType
    :ivar assignments_to_concatenate:
    :vartype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
    :ivar data_path_assignment:
    :vartype data_path_assignment: ~flow.models.LegacyDataPath
    :ivar data_set_definition_value_assignment:
    :vartype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
    :ivar name:
    :vartype name: str
    :ivar value:
    :vartype value: str
    """

    # Maps Python attribute name -> REST wire name and msrest type string.
    # Note 'computeRunSettings' is self-referential: a list of this same type.
    _attribute_map = {
        'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
        'mlc_compute_type': {'key': 'mlcComputeType', 'type': 'str'},
        'compute_run_settings': {'key': 'computeRunSettings', 'type': '[RunSettingParameterAssignment]'},
        'linked_parameter_name': {'key': 'linkedParameterName', 'type': 'str'},
        'value_type': {'key': 'valueType', 'type': 'str'},
        'assignments_to_concatenate': {'key': 'assignmentsToConcatenate', 'type': '[ParameterAssignment]'},
        'data_path_assignment': {'key': 'dataPathAssignment', 'type': 'LegacyDataPath'},
        'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': 'DataSetDefinitionValue'},
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword use_graph_default_compute:
        :paramtype use_graph_default_compute: bool
        :keyword mlc_compute_type:
        :paramtype mlc_compute_type: str
        :keyword compute_run_settings:
        :paramtype compute_run_settings: list[~flow.models.RunSettingParameterAssignment]
        :keyword linked_parameter_name:
        :paramtype linked_parameter_name: str
        :keyword value_type: Possible values include: "Literal", "GraphParameterName",
         "Concatenate", "Input", "DataPath", "DataSetDefinition".
        :paramtype value_type: str or ~flow.models.ParameterValueType
        :keyword assignments_to_concatenate:
        :paramtype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
        :keyword data_path_assignment:
        :paramtype data_path_assignment: ~flow.models.LegacyDataPath
        :keyword data_set_definition_value_assignment:
        :paramtype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
        :keyword name:
        :paramtype name: str
        :keyword value:
        :paramtype value: str
        """
        super(RunSettingParameterAssignment, self).__init__(**kwargs)
        self.use_graph_default_compute = kwargs.get('use_graph_default_compute', None)
        self.mlc_compute_type = kwargs.get('mlc_compute_type', None)
        self.compute_run_settings = kwargs.get('compute_run_settings', None)
        self.linked_parameter_name = kwargs.get('linked_parameter_name', None)
        self.value_type = kwargs.get('value_type', None)
        self.assignments_to_concatenate = kwargs.get('assignments_to_concatenate', None)
        self.data_path_assignment = kwargs.get('data_path_assignment', None)
        self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
        self.name = kwargs.get('name', None)
        self.value = kwargs.get('value', None)


class RunSettingUIParameterHint(msrest.serialization.Model):
    """RunSettingUIParameterHint.

    :ivar ui_widget_type: Possible values include: "Default", "ComputeSelection", "JsonEditor",
     "Mode", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep",
     "DataStoreSelection", "Checkbox", "MultipleSelection", "HyperparameterConfiguration",
     "JsonTextBox", "Connection", "Static".
:vartype ui_widget_type: str or ~flow.models.RunSettingUIWidgetTypeEnum :ivar json_editor: :vartype json_editor: ~flow.models.UIJsonEditor :ivar yaml_editor: :vartype yaml_editor: ~flow.models.UIYamlEditor :ivar compute_selection: :vartype compute_selection: ~flow.models.UIComputeSelection :ivar hyperparameter_configuration: :vartype hyperparameter_configuration: ~flow.models.UIHyperparameterConfiguration :ivar ux_ignore: :vartype ux_ignore: bool :ivar anonymous: :vartype anonymous: bool :ivar support_reset: :vartype support_reset: bool """ _attribute_map = { 'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'}, 'json_editor': {'key': 'jsonEditor', 'type': 'UIJsonEditor'}, 'yaml_editor': {'key': 'yamlEditor', 'type': 'UIYamlEditor'}, 'compute_selection': {'key': 'computeSelection', 'type': 'UIComputeSelection'}, 'hyperparameter_configuration': {'key': 'hyperparameterConfiguration', 'type': 'UIHyperparameterConfiguration'}, 'ux_ignore': {'key': 'uxIgnore', 'type': 'bool'}, 'anonymous': {'key': 'anonymous', 'type': 'bool'}, 'support_reset': {'key': 'supportReset', 'type': 'bool'}, } def __init__( self, **kwargs ): """ :keyword ui_widget_type: Possible values include: "Default", "ComputeSelection", "JsonEditor", "Mode", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "Checkbox", "MultipleSelection", "HyperparameterConfiguration", "JsonTextBox", "Connection", "Static". 
:paramtype ui_widget_type: str or ~flow.models.RunSettingUIWidgetTypeEnum :keyword json_editor: :paramtype json_editor: ~flow.models.UIJsonEditor :keyword yaml_editor: :paramtype yaml_editor: ~flow.models.UIYamlEditor :keyword compute_selection: :paramtype compute_selection: ~flow.models.UIComputeSelection :keyword hyperparameter_configuration: :paramtype hyperparameter_configuration: ~flow.models.UIHyperparameterConfiguration :keyword ux_ignore: :paramtype ux_ignore: bool :keyword anonymous: :paramtype anonymous: bool :keyword support_reset: :paramtype support_reset: bool """ super(RunSettingUIParameterHint, self).__init__(**kwargs) self.ui_widget_type = kwargs.get('ui_widget_type', None) self.json_editor = kwargs.get('json_editor', None) self.yaml_editor = kwargs.get('yaml_editor', None) self.compute_selection = kwargs.get('compute_selection', None) self.hyperparameter_configuration = kwargs.get('hyperparameter_configuration', None) self.ux_ignore = kwargs.get('ux_ignore', None) self.anonymous = kwargs.get('anonymous', None) self.support_reset = kwargs.get('support_reset', None) class RunStatusPeriod(msrest.serialization.Model): """RunStatusPeriod. :ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused", "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled". 
:vartype status: str or ~flow.models.RunStatus :ivar sub_periods: :vartype sub_periods: list[~flow.models.SubStatusPeriod] :ivar start: :vartype start: long :ivar end: :vartype end: long """ _attribute_map = { 'status': {'key': 'status', 'type': 'str'}, 'sub_periods': {'key': 'subPeriods', 'type': '[SubStatusPeriod]'}, 'start': {'key': 'start', 'type': 'long'}, 'end': {'key': 'end', 'type': 'long'}, } def __init__( self, **kwargs ): """ :keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused", "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled". :paramtype status: str or ~flow.models.RunStatus :keyword sub_periods: :paramtype sub_periods: list[~flow.models.SubStatusPeriod] :keyword start: :paramtype start: long :keyword end: :paramtype end: long """ super(RunStatusPeriod, self).__init__(**kwargs) self.status = kwargs.get('status', None) self.sub_periods = kwargs.get('sub_periods', None) self.start = kwargs.get('start', None) self.end = kwargs.get('end', None) class RuntimeConfiguration(msrest.serialization.Model): """RuntimeConfiguration. :ivar base_image: :vartype base_image: str :ivar version: :vartype version: str """ _attribute_map = { 'base_image': {'key': 'baseImage', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword base_image: :paramtype base_image: str :keyword version: :paramtype version: str """ super(RuntimeConfiguration, self).__init__(**kwargs) self.base_image = kwargs.get('base_image', None) self.version = kwargs.get('version', None) class RunTypeV2(msrest.serialization.Model): """RunTypeV2. 
:ivar orchestrator: :vartype orchestrator: str :ivar traits: :vartype traits: list[str] :ivar attribution: :vartype attribution: str :ivar compute_type: :vartype compute_type: str """ _validation = { 'traits': {'unique': True}, } _attribute_map = { 'orchestrator': {'key': 'orchestrator', 'type': 'str'}, 'traits': {'key': 'traits', 'type': '[str]'}, 'attribution': {'key': 'attribution', 'type': 'str'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword orchestrator: :paramtype orchestrator: str :keyword traits: :paramtype traits: list[str] :keyword attribution: :paramtype attribution: str :keyword compute_type: :paramtype compute_type: str """ super(RunTypeV2, self).__init__(**kwargs) self.orchestrator = kwargs.get('orchestrator', None) self.traits = kwargs.get('traits', None) self.attribution = kwargs.get('attribution', None) self.compute_type = kwargs.get('compute_type', None) class RunTypeV2Index(msrest.serialization.Model): """RunTypeV2Index. :ivar orchestrator: :vartype orchestrator: str :ivar traits: Dictionary of :code:`<string>`. :vartype traits: dict[str, str] :ivar attribution: :vartype attribution: str :ivar compute_type: :vartype compute_type: str """ _attribute_map = { 'orchestrator': {'key': 'orchestrator', 'type': 'str'}, 'traits': {'key': 'traits', 'type': '{str}'}, 'attribution': {'key': 'attribution', 'type': 'str'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword orchestrator: :paramtype orchestrator: str :keyword traits: Dictionary of :code:`<string>`. 
:paramtype traits: dict[str, str] :keyword attribution: :paramtype attribution: str :keyword compute_type: :paramtype compute_type: str """ super(RunTypeV2Index, self).__init__(**kwargs) self.orchestrator = kwargs.get('orchestrator', None) self.traits = kwargs.get('traits', None) self.attribution = kwargs.get('attribution', None) self.compute_type = kwargs.get('compute_type', None) class SampleMeta(msrest.serialization.Model): """SampleMeta. :ivar image: :vartype image: str :ivar id: :vartype id: str :ivar display_name: :vartype display_name: str :ivar name: :vartype name: str :ivar description: :vartype description: str :ivar doc_link: :vartype doc_link: str :ivar tags: A set of tags. :vartype tags: list[str] :ivar created_at: :vartype created_at: ~datetime.datetime :ivar updated_at: :vartype updated_at: ~datetime.datetime :ivar feed_name: :vartype feed_name: str :ivar version: :vartype version: str """ _attribute_map = { 'image': {'key': 'image', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'doc_link': {'key': 'docLink', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '[str]'}, 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, 'updated_at': {'key': 'updatedAt', 'type': 'iso-8601'}, 'feed_name': {'key': 'feedName', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword image: :paramtype image: str :keyword id: :paramtype id: str :keyword display_name: :paramtype display_name: str :keyword name: :paramtype name: str :keyword description: :paramtype description: str :keyword doc_link: :paramtype doc_link: str :keyword tags: A set of tags. 
:paramtype tags: list[str] :keyword created_at: :paramtype created_at: ~datetime.datetime :keyword updated_at: :paramtype updated_at: ~datetime.datetime :keyword feed_name: :paramtype feed_name: str :keyword version: :paramtype version: str """ super(SampleMeta, self).__init__(**kwargs) self.image = kwargs.get('image', None) self.id = kwargs.get('id', None) self.display_name = kwargs.get('display_name', None) self.name = kwargs.get('name', None) self.description = kwargs.get('description', None) self.doc_link = kwargs.get('doc_link', None) self.tags = kwargs.get('tags', None) self.created_at = kwargs.get('created_at', None) self.updated_at = kwargs.get('updated_at', None) self.feed_name = kwargs.get('feed_name', None) self.version = kwargs.get('version', None) class SavedDataSetReference(msrest.serialization.Model): """SavedDataSetReference. :ivar id: :vartype id: str """ _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword id: :paramtype id: str """ super(SavedDataSetReference, self).__init__(**kwargs) self.id = kwargs.get('id', None) class SavePipelineDraftRequest(msrest.serialization.Model): """SavePipelineDraftRequest. :ivar ui_widget_meta_infos: :vartype ui_widget_meta_infos: list[~flow.models.UIWidgetMetaInfo] :ivar web_service_inputs: :vartype web_service_inputs: list[~flow.models.WebServicePort] :ivar web_service_outputs: :vartype web_service_outputs: list[~flow.models.WebServicePort] :ivar nodes_in_draft: :vartype nodes_in_draft: list[str] :ivar name: :vartype name: str :ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown". :vartype pipeline_type: str or ~flow.models.PipelineType :ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom". 
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode :ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin", "ContainsDesignerBuildin". :vartype graph_components_mode: str or ~flow.models.GraphComponentsMode :ivar sub_pipelines_info: :vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo :ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`. :vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft] :ivar pipeline_parameters: This is a dictionary. :vartype pipeline_parameters: dict[str, str] :ivar data_path_assignments: This is a dictionary. :vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath] :ivar data_set_definition_value_assignments: This is a dictionary. :vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue] :ivar asset_output_settings_assignments: This is a dictionary. :vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings] :ivar graph: :vartype graph: ~flow.models.GraphDraftEntity :ivar pipeline_run_settings: :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment] :ivar module_node_run_settings: :vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting] :ivar module_node_ui_input_settings: :vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting] :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar continue_run_on_step_failure: :vartype continue_run_on_step_failure: bool :ivar description: :vartype description: str :ivar properties: This is a dictionary. 
:vartype properties: dict[str, str] :ivar enforce_rerun: :vartype enforce_rerun: bool :ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset", "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset". :vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes """ _attribute_map = { 'ui_widget_meta_infos': {'key': 'uiWidgetMetaInfos', 'type': '[UIWidgetMetaInfo]'}, 'web_service_inputs': {'key': 'webServiceInputs', 'type': '[WebServicePort]'}, 'web_service_outputs': {'key': 'webServiceOutputs', 'type': '[WebServicePort]'}, 'nodes_in_draft': {'key': 'nodesInDraft', 'type': '[str]'}, 'name': {'key': 'name', 'type': 'str'}, 'pipeline_type': {'key': 'pipelineType', 'type': 'str'}, 'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'}, 'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'}, 'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'}, 'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'}, 'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'}, 'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'}, 'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'}, 'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'}, 'graph': {'key': 'graph', 'type': 'GraphDraftEntity'}, 'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'}, 'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'}, 'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'}, 'tags': {'key': 'tags', 'type': 
'{str}'}, 'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'}, 'description': {'key': 'description', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'}, 'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword ui_widget_meta_infos: :paramtype ui_widget_meta_infos: list[~flow.models.UIWidgetMetaInfo] :keyword web_service_inputs: :paramtype web_service_inputs: list[~flow.models.WebServicePort] :keyword web_service_outputs: :paramtype web_service_outputs: list[~flow.models.WebServicePort] :keyword nodes_in_draft: :paramtype nodes_in_draft: list[str] :keyword name: :paramtype name: str :keyword pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown". :paramtype pipeline_type: str or ~flow.models.PipelineType :keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom". :paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode :keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin", "ContainsDesignerBuildin". :paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode :keyword sub_pipelines_info: :paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo :keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`. :paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft] :keyword pipeline_parameters: This is a dictionary. :paramtype pipeline_parameters: dict[str, str] :keyword data_path_assignments: This is a dictionary. :paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath] :keyword data_set_definition_value_assignments: This is a dictionary. :paramtype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue] :keyword asset_output_settings_assignments: This is a dictionary. 
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings] :keyword graph: :paramtype graph: ~flow.models.GraphDraftEntity :keyword pipeline_run_settings: :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment] :keyword module_node_run_settings: :paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting] :keyword module_node_ui_input_settings: :paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting] :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword continue_run_on_step_failure: :paramtype continue_run_on_step_failure: bool :keyword description: :paramtype description: str :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] :keyword enforce_rerun: :paramtype enforce_rerun: bool :keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset", "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset". 
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes """ super(SavePipelineDraftRequest, self).__init__(**kwargs) self.ui_widget_meta_infos = kwargs.get('ui_widget_meta_infos', None) self.web_service_inputs = kwargs.get('web_service_inputs', None) self.web_service_outputs = kwargs.get('web_service_outputs', None) self.nodes_in_draft = kwargs.get('nodes_in_draft', None) self.name = kwargs.get('name', None) self.pipeline_type = kwargs.get('pipeline_type', None) self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None) self.graph_components_mode = kwargs.get('graph_components_mode', None) self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None) self.flattened_sub_graphs = kwargs.get('flattened_sub_graphs', None) self.pipeline_parameters = kwargs.get('pipeline_parameters', None) self.data_path_assignments = kwargs.get('data_path_assignments', None) self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None) self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None) self.graph = kwargs.get('graph', None) self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None) self.module_node_run_settings = kwargs.get('module_node_run_settings', None) self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None) self.tags = kwargs.get('tags', None) self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None) self.description = kwargs.get('description', None) self.properties = kwargs.get('properties', None) self.enforce_rerun = kwargs.get('enforce_rerun', None) self.dataset_access_modes = kwargs.get('dataset_access_modes', None) class ScheduleBase(msrest.serialization.Model): """ScheduleBase. :ivar schedule_status: Possible values include: "Enabled", "Disabled". :vartype schedule_status: str or ~flow.models.MfeInternalScheduleStatus :ivar schedule_type: Possible values include: "Cron", "Recurrence". 
:vartype schedule_type: str or ~flow.models.ScheduleType :ivar end_time: :vartype end_time: ~datetime.datetime :ivar start_time: :vartype start_time: ~datetime.datetime :ivar time_zone: :vartype time_zone: str :ivar expression: :vartype expression: str :ivar frequency: Possible values include: "Minute", "Hour", "Day", "Week", "Month". :vartype frequency: str or ~flow.models.RecurrenceFrequency :ivar interval: :vartype interval: int :ivar pattern: :vartype pattern: ~flow.models.RecurrencePattern """ _attribute_map = { 'schedule_status': {'key': 'scheduleStatus', 'type': 'str'}, 'schedule_type': {'key': 'scheduleType', 'type': 'str'}, 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, 'time_zone': {'key': 'timeZone', 'type': 'str'}, 'expression': {'key': 'expression', 'type': 'str'}, 'frequency': {'key': 'frequency', 'type': 'str'}, 'interval': {'key': 'interval', 'type': 'int'}, 'pattern': {'key': 'pattern', 'type': 'RecurrencePattern'}, } def __init__( self, **kwargs ): """ :keyword schedule_status: Possible values include: "Enabled", "Disabled". :paramtype schedule_status: str or ~flow.models.MfeInternalScheduleStatus :keyword schedule_type: Possible values include: "Cron", "Recurrence". :paramtype schedule_type: str or ~flow.models.ScheduleType :keyword end_time: :paramtype end_time: ~datetime.datetime :keyword start_time: :paramtype start_time: ~datetime.datetime :keyword time_zone: :paramtype time_zone: str :keyword expression: :paramtype expression: str :keyword frequency: Possible values include: "Minute", "Hour", "Day", "Week", "Month". 
:paramtype frequency: str or ~flow.models.RecurrenceFrequency :keyword interval: :paramtype interval: int :keyword pattern: :paramtype pattern: ~flow.models.RecurrencePattern """ super(ScheduleBase, self).__init__(**kwargs) self.schedule_status = kwargs.get('schedule_status', None) self.schedule_type = kwargs.get('schedule_type', None) self.end_time = kwargs.get('end_time', None) self.start_time = kwargs.get('start_time', None) self.time_zone = kwargs.get('time_zone', None) self.expression = kwargs.get('expression', None) self.frequency = kwargs.get('frequency', None) self.interval = kwargs.get('interval', None) self.pattern = kwargs.get('pattern', None) class SchemaContractsCreatedBy(msrest.serialization.Model): """SchemaContractsCreatedBy. :ivar user_object_id: :vartype user_object_id: str :ivar user_tenant_id: :vartype user_tenant_id: str :ivar user_name: :vartype user_name: str :ivar user_principal_name: :vartype user_principal_name: str """ _attribute_map = { 'user_object_id': {'key': 'userObjectId', 'type': 'str'}, 'user_tenant_id': {'key': 'userTenantId', 'type': 'str'}, 'user_name': {'key': 'userName', 'type': 'str'}, 'user_principal_name': {'key': 'userPrincipalName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword user_object_id: :paramtype user_object_id: str :keyword user_tenant_id: :paramtype user_tenant_id: str :keyword user_name: :paramtype user_name: str :keyword user_principal_name: :paramtype user_principal_name: str """ super(SchemaContractsCreatedBy, self).__init__(**kwargs) self.user_object_id = kwargs.get('user_object_id', None) self.user_tenant_id = kwargs.get('user_tenant_id', None) self.user_name = kwargs.get('user_name', None) self.user_principal_name = kwargs.get('user_principal_name', None) class ScopeCloudConfiguration(msrest.serialization.Model): """ScopeCloudConfiguration. :ivar input_path_suffixes: This is a dictionary. 
:vartype input_path_suffixes: dict[str, ~flow.models.ArgumentAssignment] :ivar output_path_suffixes: This is a dictionary. :vartype output_path_suffixes: dict[str, ~flow.models.ArgumentAssignment] :ivar user_alias: :vartype user_alias: str :ivar tokens: :vartype tokens: int :ivar auto_token: :vartype auto_token: int :ivar vcp: :vartype vcp: float """ _attribute_map = { 'input_path_suffixes': {'key': 'inputPathSuffixes', 'type': '{ArgumentAssignment}'}, 'output_path_suffixes': {'key': 'outputPathSuffixes', 'type': '{ArgumentAssignment}'}, 'user_alias': {'key': 'userAlias', 'type': 'str'}, 'tokens': {'key': 'tokens', 'type': 'int'}, 'auto_token': {'key': 'autoToken', 'type': 'int'}, 'vcp': {'key': 'vcp', 'type': 'float'}, } def __init__( self, **kwargs ): """ :keyword input_path_suffixes: This is a dictionary. :paramtype input_path_suffixes: dict[str, ~flow.models.ArgumentAssignment] :keyword output_path_suffixes: This is a dictionary. :paramtype output_path_suffixes: dict[str, ~flow.models.ArgumentAssignment] :keyword user_alias: :paramtype user_alias: str :keyword tokens: :paramtype tokens: int :keyword auto_token: :paramtype auto_token: int :keyword vcp: :paramtype vcp: float """ super(ScopeCloudConfiguration, self).__init__(**kwargs) self.input_path_suffixes = kwargs.get('input_path_suffixes', None) self.output_path_suffixes = kwargs.get('output_path_suffixes', None) self.user_alias = kwargs.get('user_alias', None) self.tokens = kwargs.get('tokens', None) self.auto_token = kwargs.get('auto_token', None) self.vcp = kwargs.get('vcp', None) class Seasonality(msrest.serialization.Model): """Seasonality. :ivar mode: Possible values include: "Auto", "Custom". :vartype mode: str or ~flow.models.SeasonalityMode :ivar value: :vartype value: int """ _attribute_map = { 'mode': {'key': 'mode', 'type': 'str'}, 'value': {'key': 'value', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword mode: Possible values include: "Auto", "Custom". 
:paramtype mode: str or ~flow.models.SeasonalityMode :keyword value: :paramtype value: int """ super(Seasonality, self).__init__(**kwargs) self.mode = kwargs.get('mode', None) self.value = kwargs.get('value', None) class SecretConfiguration(msrest.serialization.Model): """SecretConfiguration. :ivar workspace_secret_name: :vartype workspace_secret_name: str :ivar uri: :vartype uri: str """ _attribute_map = { 'workspace_secret_name': {'key': 'workspace_secret_name', 'type': 'str'}, 'uri': {'key': 'uri', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword workspace_secret_name: :paramtype workspace_secret_name: str :keyword uri: :paramtype uri: str """ super(SecretConfiguration, self).__init__(**kwargs) self.workspace_secret_name = kwargs.get('workspace_secret_name', None) self.uri = kwargs.get('uri', None) class SegmentedResult1(msrest.serialization.Model): """SegmentedResult1. :ivar value: :vartype value: list[~flow.models.FlowIndexEntity] :ivar continuation_token: :vartype continuation_token: str :ivar count: :vartype count: int :ivar next_link: :vartype next_link: str """ _attribute_map = { 'value': {'key': 'value', 'type': '[FlowIndexEntity]'}, 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, 'count': {'key': 'count', 'type': 'int'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword value: :paramtype value: list[~flow.models.FlowIndexEntity] :keyword continuation_token: :paramtype continuation_token: str :keyword count: :paramtype count: int :keyword next_link: :paramtype next_link: str """ super(SegmentedResult1, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.continuation_token = kwargs.get('continuation_token', None) self.count = kwargs.get('count', None) self.next_link = kwargs.get('next_link', None) class ServiceLogRequest(msrest.serialization.Model): """ServiceLogRequest. 
:ivar log_level: Possible values include: "Trace", "Debug", "Information", "Warning", "Error", "Critical", "None". :vartype log_level: str or ~flow.models.LogLevel :ivar message: :vartype message: str :ivar timestamp: :vartype timestamp: ~datetime.datetime """ _attribute_map = { 'log_level': {'key': 'logLevel', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'timestamp': {'key': 'timestamp', 'type': 'iso-8601'}, } def __init__( self, **kwargs ): """ :keyword log_level: Possible values include: "Trace", "Debug", "Information", "Warning", "Error", "Critical", "None". :paramtype log_level: str or ~flow.models.LogLevel :keyword message: :paramtype message: str :keyword timestamp: :paramtype timestamp: ~datetime.datetime """ super(ServiceLogRequest, self).__init__(**kwargs) self.log_level = kwargs.get('log_level', None) self.message = kwargs.get('message', None) self.timestamp = kwargs.get('timestamp', None) class SessionApplication(msrest.serialization.Model): """SessionApplication. :ivar image: :vartype image: str :ivar env_vars: Dictionary of :code:`<string>`. :vartype env_vars: dict[str, str] :ivar python_pip_requirements: :vartype python_pip_requirements: list[str] :ivar setup_results: :vartype setup_results: list[~flow.models.SessionApplicationRunCommandResult] """ _attribute_map = { 'image': {'key': 'image', 'type': 'str'}, 'env_vars': {'key': 'envVars', 'type': '{str}'}, 'python_pip_requirements': {'key': 'pythonPipRequirements', 'type': '[str]'}, 'setup_results': {'key': 'setupResults', 'type': '[SessionApplicationRunCommandResult]'}, } def __init__( self, **kwargs ): """ :keyword image: :paramtype image: str :keyword env_vars: Dictionary of :code:`<string>`. 
:paramtype env_vars: dict[str, str] :keyword python_pip_requirements: :paramtype python_pip_requirements: list[str] :keyword setup_results: :paramtype setup_results: list[~flow.models.SessionApplicationRunCommandResult] """ super(SessionApplication, self).__init__(**kwargs) self.image = kwargs.get('image', None) self.env_vars = kwargs.get('env_vars', None) self.python_pip_requirements = kwargs.get('python_pip_requirements', None) self.setup_results = kwargs.get('setup_results', None) class SessionApplicationRunCommandResult(msrest.serialization.Model): """SessionApplicationRunCommandResult. :ivar command: :vartype command: str :ivar arguments: :vartype arguments: list[str] :ivar exit_code: :vartype exit_code: int :ivar std_out: :vartype std_out: str :ivar std_err: :vartype std_err: str """ _attribute_map = { 'command': {'key': 'command', 'type': 'str'}, 'arguments': {'key': 'arguments', 'type': '[str]'}, 'exit_code': {'key': 'exitCode', 'type': 'int'}, 'std_out': {'key': 'stdOut', 'type': 'str'}, 'std_err': {'key': 'stdErr', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword command: :paramtype command: str :keyword arguments: :paramtype arguments: list[str] :keyword exit_code: :paramtype exit_code: int :keyword std_out: :paramtype std_out: str :keyword std_err: :paramtype std_err: str """ super(SessionApplicationRunCommandResult, self).__init__(**kwargs) self.command = kwargs.get('command', None) self.arguments = kwargs.get('arguments', None) self.exit_code = kwargs.get('exit_code', None) self.std_out = kwargs.get('std_out', None) self.std_err = kwargs.get('std_err', None) class SessionProperties(msrest.serialization.Model): """SessionProperties. 
:ivar session_id: :vartype session_id: str :ivar subscription_id: :vartype subscription_id: str :ivar resource_group_name: :vartype resource_group_name: str :ivar workspace_name: :vartype workspace_name: str :ivar user_object_id: :vartype user_object_id: str :ivar user_tenant_id: :vartype user_tenant_id: str :ivar vm_size: :vartype vm_size: str :ivar max_idle_time_seconds: :vartype max_idle_time_seconds: long :ivar application: :vartype application: ~flow.models.SessionApplication :ivar last_alive_time: :vartype last_alive_time: ~datetime.datetime """ _attribute_map = { 'session_id': {'key': 'sessionId', 'type': 'str'}, 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, 'resource_group_name': {'key': 'resourceGroupName', 'type': 'str'}, 'workspace_name': {'key': 'workspaceName', 'type': 'str'}, 'user_object_id': {'key': 'userObjectId', 'type': 'str'}, 'user_tenant_id': {'key': 'userTenantId', 'type': 'str'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'}, 'application': {'key': 'application', 'type': 'SessionApplication'}, 'last_alive_time': {'key': 'lastAliveTime', 'type': 'iso-8601'}, } def __init__( self, **kwargs ): """ :keyword session_id: :paramtype session_id: str :keyword subscription_id: :paramtype subscription_id: str :keyword resource_group_name: :paramtype resource_group_name: str :keyword workspace_name: :paramtype workspace_name: str :keyword user_object_id: :paramtype user_object_id: str :keyword user_tenant_id: :paramtype user_tenant_id: str :keyword vm_size: :paramtype vm_size: str :keyword max_idle_time_seconds: :paramtype max_idle_time_seconds: long :keyword application: :paramtype application: ~flow.models.SessionApplication :keyword last_alive_time: :paramtype last_alive_time: ~datetime.datetime """ super(SessionProperties, self).__init__(**kwargs) self.session_id = kwargs.get('session_id', None) self.subscription_id = kwargs.get('subscription_id', None) 
self.resource_group_name = kwargs.get('resource_group_name', None) self.workspace_name = kwargs.get('workspace_name', None) self.user_object_id = kwargs.get('user_object_id', None) self.user_tenant_id = kwargs.get('user_tenant_id', None) self.vm_size = kwargs.get('vm_size', None) self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None) self.application = kwargs.get('application', None) self.last_alive_time = kwargs.get('last_alive_time', None) class SetupFlowSessionRequest(msrest.serialization.Model): """SetupFlowSessionRequest. :ivar action: Possible values include: "Install", "Reset", "Update", "Delete". :vartype action: str or ~flow.models.SetupFlowSessionAction :ivar vm_size: :vartype vm_size: str :ivar max_idle_time_seconds: :vartype max_idle_time_seconds: long :ivar identity: :vartype identity: str """ _attribute_map = { 'action': {'key': 'action', 'type': 'str'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'}, 'identity': {'key': 'identity', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword action: Possible values include: "Install", "Reset", "Update", "Delete". :paramtype action: str or ~flow.models.SetupFlowSessionAction :keyword vm_size: :paramtype vm_size: str :keyword max_idle_time_seconds: :paramtype max_idle_time_seconds: long :keyword identity: :paramtype identity: str """ super(SetupFlowSessionRequest, self).__init__(**kwargs) self.action = kwargs.get('action', None) self.vm_size = kwargs.get('vm_size', None) self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None) self.identity = kwargs.get('identity', None) class SharingScope(msrest.serialization.Model): """SharingScope. :ivar type: Possible values include: "Global", "Tenant", "Subscription", "ResourceGroup", "Workspace". 
    :vartype type: str or ~flow.models.ScopeType
    :ivar identifier:
    :vartype identifier: str
    """

    # Wire-format serialization map: attribute name -> JSON key and msrest type.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'identifier': {'key': 'identifier', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword type: Possible values include: "Global", "Tenant", "Subscription", "ResourceGroup",
         "Workspace".
        :paramtype type: str or ~flow.models.ScopeType
        :keyword identifier:
        :paramtype identifier: str
        """
        super(SharingScope, self).__init__(**kwargs)
        self.type = kwargs.get('type', None)
        self.identifier = kwargs.get('identifier', None)


# NOTE: AutoRest-generated model — hand edits will be lost on regeneration.
class Snapshot(msrest.serialization.Model):
    """Snapshot.

    :ivar id:
    :vartype id: str
    :ivar directory_name:
    :vartype directory_name: str
    :ivar snapshot_asset_id:
    :vartype snapshot_asset_id: str
    :ivar snapshot_entity_id:
    :vartype snapshot_entity_id: str
    """

    # Wire-format serialization map: attribute name -> JSON key and msrest type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'directory_name': {'key': 'directoryName', 'type': 'str'},
        'snapshot_asset_id': {'key': 'snapshotAssetId', 'type': 'str'},
        'snapshot_entity_id': {'key': 'snapshotEntityId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword directory_name:
        :paramtype directory_name: str
        :keyword snapshot_asset_id:
        :paramtype snapshot_asset_id: str
        :keyword snapshot_entity_id:
        :paramtype snapshot_entity_id: str
        """
        super(Snapshot, self).__init__(**kwargs)
        self.id = kwargs.get('id', None)
        self.directory_name = kwargs.get('directory_name', None)
        self.snapshot_asset_id = kwargs.get('snapshot_asset_id', None)
        self.snapshot_entity_id = kwargs.get('snapshot_entity_id', None)


# NOTE: AutoRest-generated model — hand edits will be lost on regeneration.
class SnapshotInfo(msrest.serialization.Model):
    """SnapshotInfo.

    :ivar root_download_url:
    :vartype root_download_url: str
    :ivar snapshots: This is a dictionary.
    :vartype snapshots: dict[str, ~flow.models.DownloadResourceInfo]
    """

    # Wire-format serialization map: '{DownloadResourceInfo}' denotes a dict of that model type.
    _attribute_map = {
        'root_download_url': {'key': 'rootDownloadUrl', 'type': 'str'},
        'snapshots': {'key': 'snapshots', 'type': '{DownloadResourceInfo}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword root_download_url:
        :paramtype root_download_url: str
        :keyword snapshots: This is a dictionary.
        :paramtype snapshots: dict[str, ~flow.models.DownloadResourceInfo]
        """
        super(SnapshotInfo, self).__init__(**kwargs)
        self.root_download_url = kwargs.get('root_download_url', None)
        self.snapshots = kwargs.get('snapshots', None)


# NOTE: AutoRest-generated model — hand edits will be lost on regeneration.
class SourceCodeDataReference(msrest.serialization.Model):
    """SourceCodeDataReference.

    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar path:
    :vartype path: str
    """

    # Wire-format serialization map: attribute name -> JSON key and msrest type.
    _attribute_map = {
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'path': {'key': 'path', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword path:
        :paramtype path: str
        """
        super(SourceCodeDataReference, self).__init__(**kwargs)
        self.data_store_name = kwargs.get('data_store_name', None)
        self.path = kwargs.get('path', None)


# NOTE: AutoRest-generated model — hand edits will be lost on regeneration.
class SparkConfiguration(msrest.serialization.Model):
    """SparkConfiguration.

    :ivar configuration: Dictionary of :code:`<string>`.
:vartype configuration: dict[str, str] :ivar files: :vartype files: list[str] :ivar archives: :vartype archives: list[str] :ivar jars: :vartype jars: list[str] :ivar py_files: :vartype py_files: list[str] :ivar spark_pool_resource_id: :vartype spark_pool_resource_id: str """ _attribute_map = { 'configuration': {'key': 'configuration', 'type': '{str}'}, 'files': {'key': 'files', 'type': '[str]'}, 'archives': {'key': 'archives', 'type': '[str]'}, 'jars': {'key': 'jars', 'type': '[str]'}, 'py_files': {'key': 'pyFiles', 'type': '[str]'}, 'spark_pool_resource_id': {'key': 'sparkPoolResourceId', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword configuration: Dictionary of :code:`<string>`. :paramtype configuration: dict[str, str] :keyword files: :paramtype files: list[str] :keyword archives: :paramtype archives: list[str] :keyword jars: :paramtype jars: list[str] :keyword py_files: :paramtype py_files: list[str] :keyword spark_pool_resource_id: :paramtype spark_pool_resource_id: str """ super(SparkConfiguration, self).__init__(**kwargs) self.configuration = kwargs.get('configuration', None) self.files = kwargs.get('files', None) self.archives = kwargs.get('archives', None) self.jars = kwargs.get('jars', None) self.py_files = kwargs.get('py_files', None) self.spark_pool_resource_id = kwargs.get('spark_pool_resource_id', None) class SparkJarTaskDto(msrest.serialization.Model): """SparkJarTaskDto. 
:ivar main_class_name: :vartype main_class_name: str :ivar parameters: :vartype parameters: list[str] """ _attribute_map = { 'main_class_name': {'key': 'main_class_name', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '[str]'}, } def __init__( self, **kwargs ): """ :keyword main_class_name: :paramtype main_class_name: str :keyword parameters: :paramtype parameters: list[str] """ super(SparkJarTaskDto, self).__init__(**kwargs) self.main_class_name = kwargs.get('main_class_name', None) self.parameters = kwargs.get('parameters', None) class SparkJob(msrest.serialization.Model): """SparkJob. :ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML", "Spark", "Base". :vartype job_type: str or ~flow.models.JobType :ivar resources: :vartype resources: ~flow.models.SparkResourceConfiguration :ivar args: :vartype args: str :ivar code_id: :vartype code_id: str :ivar entry: :vartype entry: ~flow.models.SparkJobEntry :ivar py_files: :vartype py_files: list[str] :ivar jars: :vartype jars: list[str] :ivar files: :vartype files: list[str] :ivar archives: :vartype archives: list[str] :ivar environment_id: :vartype environment_id: str :ivar input_data_bindings: Dictionary of :code:`<InputDataBinding>`. :vartype input_data_bindings: dict[str, ~flow.models.InputDataBinding] :ivar output_data_bindings: Dictionary of :code:`<OutputDataBinding>`. :vartype output_data_bindings: dict[str, ~flow.models.OutputDataBinding] :ivar conf: Dictionary of :code:`<string>`. :vartype conf: dict[str, str] :ivar environment_variables: Dictionary of :code:`<string>`. :vartype environment_variables: dict[str, str] :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled", "InProgress". 
:vartype provisioning_state: str or ~flow.models.JobProvisioningState :ivar parent_job_name: :vartype parent_job_name: str :ivar display_name: :vartype display_name: str :ivar experiment_name: :vartype experiment_name: str :ivar status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". :vartype status: str or ~flow.models.JobStatus :ivar interaction_endpoints: Dictionary of :code:`<JobEndpoint>`. :vartype interaction_endpoints: dict[str, ~flow.models.JobEndpoint] :ivar identity: :vartype identity: ~flow.models.MfeInternalIdentityConfiguration :ivar compute: :vartype compute: ~flow.models.ComputeConfiguration :ivar priority: :vartype priority: int :ivar output: :vartype output: ~flow.models.JobOutputArtifacts :ivar is_archived: :vartype is_archived: bool :ivar schedule: :vartype schedule: ~flow.models.ScheduleBase :ivar component_id: :vartype component_id: str :ivar notification_setting: :vartype notification_setting: ~flow.models.NotificationSetting :ivar secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`. :vartype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration] :ivar description: :vartype description: str :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar properties: This is a dictionary. 
:vartype properties: dict[str, str] """ _attribute_map = { 'job_type': {'key': 'jobType', 'type': 'str'}, 'resources': {'key': 'resources', 'type': 'SparkResourceConfiguration'}, 'args': {'key': 'args', 'type': 'str'}, 'code_id': {'key': 'codeId', 'type': 'str'}, 'entry': {'key': 'entry', 'type': 'SparkJobEntry'}, 'py_files': {'key': 'pyFiles', 'type': '[str]'}, 'jars': {'key': 'jars', 'type': '[str]'}, 'files': {'key': 'files', 'type': '[str]'}, 'archives': {'key': 'archives', 'type': '[str]'}, 'environment_id': {'key': 'environmentId', 'type': 'str'}, 'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'}, 'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'}, 'conf': {'key': 'conf', 'type': '{str}'}, 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, 'parent_job_name': {'key': 'parentJobName', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'experiment_name': {'key': 'experimentName', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'}, 'identity': {'key': 'identity', 'type': 'MfeInternalIdentityConfiguration'}, 'compute': {'key': 'compute', 'type': 'ComputeConfiguration'}, 'priority': {'key': 'priority', 'type': 'int'}, 'output': {'key': 'output', 'type': 'JobOutputArtifacts'}, 'is_archived': {'key': 'isArchived', 'type': 'bool'}, 'schedule': {'key': 'schedule', 'type': 'ScheduleBase'}, 'component_id': {'key': 'componentId', 'type': 'str'}, 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{MfeInternalSecretConfiguration}'}, 'description': {'key': 'description', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'properties': {'key': 'properties', 'type': '{str}'}, } def __init__( self, **kwargs 
): """ :keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML", "Spark", "Base". :paramtype job_type: str or ~flow.models.JobType :keyword resources: :paramtype resources: ~flow.models.SparkResourceConfiguration :keyword args: :paramtype args: str :keyword code_id: :paramtype code_id: str :keyword entry: :paramtype entry: ~flow.models.SparkJobEntry :keyword py_files: :paramtype py_files: list[str] :keyword jars: :paramtype jars: list[str] :keyword files: :paramtype files: list[str] :keyword archives: :paramtype archives: list[str] :keyword environment_id: :paramtype environment_id: str :keyword input_data_bindings: Dictionary of :code:`<InputDataBinding>`. :paramtype input_data_bindings: dict[str, ~flow.models.InputDataBinding] :keyword output_data_bindings: Dictionary of :code:`<OutputDataBinding>`. :paramtype output_data_bindings: dict[str, ~flow.models.OutputDataBinding] :keyword conf: Dictionary of :code:`<string>`. :paramtype conf: dict[str, str] :keyword environment_variables: Dictionary of :code:`<string>`. :paramtype environment_variables: dict[str, str] :keyword provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled", "InProgress". :paramtype provisioning_state: str or ~flow.models.JobProvisioningState :keyword parent_job_name: :paramtype parent_job_name: str :keyword display_name: :paramtype display_name: str :keyword experiment_name: :paramtype experiment_name: str :keyword status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". :paramtype status: str or ~flow.models.JobStatus :keyword interaction_endpoints: Dictionary of :code:`<JobEndpoint>`. 
:paramtype interaction_endpoints: dict[str, ~flow.models.JobEndpoint] :keyword identity: :paramtype identity: ~flow.models.MfeInternalIdentityConfiguration :keyword compute: :paramtype compute: ~flow.models.ComputeConfiguration :keyword priority: :paramtype priority: int :keyword output: :paramtype output: ~flow.models.JobOutputArtifacts :keyword is_archived: :paramtype is_archived: bool :keyword schedule: :paramtype schedule: ~flow.models.ScheduleBase :keyword component_id: :paramtype component_id: str :keyword notification_setting: :paramtype notification_setting: ~flow.models.NotificationSetting :keyword secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`. :paramtype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration] :keyword description: :paramtype description: str :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] """ super(SparkJob, self).__init__(**kwargs) self.job_type = kwargs.get('job_type', None) self.resources = kwargs.get('resources', None) self.args = kwargs.get('args', None) self.code_id = kwargs.get('code_id', None) self.entry = kwargs.get('entry', None) self.py_files = kwargs.get('py_files', None) self.jars = kwargs.get('jars', None) self.files = kwargs.get('files', None) self.archives = kwargs.get('archives', None) self.environment_id = kwargs.get('environment_id', None) self.input_data_bindings = kwargs.get('input_data_bindings', None) self.output_data_bindings = kwargs.get('output_data_bindings', None) self.conf = kwargs.get('conf', None) self.environment_variables = kwargs.get('environment_variables', None) self.provisioning_state = kwargs.get('provisioning_state', None) self.parent_job_name = kwargs.get('parent_job_name', None) self.display_name = kwargs.get('display_name', None) self.experiment_name = kwargs.get('experiment_name', None) self.status = kwargs.get('status', None) 
self.interaction_endpoints = kwargs.get('interaction_endpoints', None) self.identity = kwargs.get('identity', None) self.compute = kwargs.get('compute', None) self.priority = kwargs.get('priority', None) self.output = kwargs.get('output', None) self.is_archived = kwargs.get('is_archived', None) self.schedule = kwargs.get('schedule', None) self.component_id = kwargs.get('component_id', None) self.notification_setting = kwargs.get('notification_setting', None) self.secrets_configuration = kwargs.get('secrets_configuration', None) self.description = kwargs.get('description', None) self.tags = kwargs.get('tags', None) self.properties = kwargs.get('properties', None) class SparkJobEntry(msrest.serialization.Model): """SparkJobEntry. :ivar file: :vartype file: str :ivar class_name: :vartype class_name: str """ _attribute_map = { 'file': {'key': 'file', 'type': 'str'}, 'class_name': {'key': 'className', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword file: :paramtype file: str :keyword class_name: :paramtype class_name: str """ super(SparkJobEntry, self).__init__(**kwargs) self.file = kwargs.get('file', None) self.class_name = kwargs.get('class_name', None) class SparkMavenPackage(msrest.serialization.Model): """SparkMavenPackage. :ivar group: :vartype group: str :ivar artifact: :vartype artifact: str :ivar version: :vartype version: str """ _attribute_map = { 'group': {'key': 'group', 'type': 'str'}, 'artifact': {'key': 'artifact', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword group: :paramtype group: str :keyword artifact: :paramtype artifact: str :keyword version: :paramtype version: str """ super(SparkMavenPackage, self).__init__(**kwargs) self.group = kwargs.get('group', None) self.artifact = kwargs.get('artifact', None) self.version = kwargs.get('version', None) class SparkPythonTaskDto(msrest.serialization.Model): """SparkPythonTaskDto. 
    :ivar python_file:
    :vartype python_file: str
    :ivar parameters:
    :vartype parameters: list[str]
    """

    # Wire-format serialization map; note the snake_case wire key 'python_file'
    # (matches the Databricks-style task payload, unlike the camelCase keys elsewhere).
    _attribute_map = {
        'python_file': {'key': 'python_file', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword python_file:
        :paramtype python_file: str
        :keyword parameters:
        :paramtype parameters: list[str]
        """
        super(SparkPythonTaskDto, self).__init__(**kwargs)
        self.python_file = kwargs.get('python_file', None)
        self.parameters = kwargs.get('parameters', None)


# NOTE: AutoRest-generated model — hand edits will be lost on regeneration.
class SparkResourceConfiguration(msrest.serialization.Model):
    """SparkResourceConfiguration.

    :ivar instance_type:
    :vartype instance_type: str
    :ivar runtime_version:
    :vartype runtime_version: str
    """

    # Wire-format serialization map: attribute name -> JSON key and msrest type.
    _attribute_map = {
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'runtime_version': {'key': 'runtimeVersion', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword runtime_version:
        :paramtype runtime_version: str
        """
        super(SparkResourceConfiguration, self).__init__(**kwargs)
        self.instance_type = kwargs.get('instance_type', None)
        self.runtime_version = kwargs.get('runtime_version', None)


# NOTE: AutoRest-generated model — hand edits will be lost on regeneration.
class SparkSection(msrest.serialization.Model):
    """SparkSection.

    :ivar repositories:
    :vartype repositories: list[str]
    :ivar packages:
    :vartype packages: list[~flow.models.SparkMavenPackage]
    :ivar precache_packages:
    :vartype precache_packages: bool
    """

    # Wire-format serialization map: '[SparkMavenPackage]' denotes a list of that model type.
    _attribute_map = {
        'repositories': {'key': 'repositories', 'type': '[str]'},
        'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
        'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword repositories:
        :paramtype repositories: list[str]
        :keyword packages:
        :paramtype packages: list[~flow.models.SparkMavenPackage]
        :keyword precache_packages:
        :paramtype precache_packages: bool
        """
        super(SparkSection, self).__init__(**kwargs)
        self.repositories = kwargs.get('repositories', None)
        self.packages = kwargs.get('packages', None)
        self.precache_packages = kwargs.get('precache_packages', None)


# NOTE: AutoRest-generated model — hand edits will be lost on regeneration.
class SparkSubmitTaskDto(msrest.serialization.Model):
    """SparkSubmitTaskDto.

    :ivar parameters:
    :vartype parameters: list[str]
    """

    # Wire-format serialization map: attribute name -> JSON key and msrest type.
    _attribute_map = {
        'parameters': {'key': 'parameters', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword parameters:
        :paramtype parameters: list[str]
        """
        super(SparkSubmitTaskDto, self).__init__(**kwargs)
        self.parameters = kwargs.get('parameters', None)


# NOTE: AutoRest-generated model — hand edits will be lost on regeneration.
class SqlDataPath(msrest.serialization.Model):
    """SqlDataPath.

:ivar sql_table_name: :vartype sql_table_name: str :ivar sql_query: :vartype sql_query: str :ivar sql_stored_procedure_name: :vartype sql_stored_procedure_name: str :ivar sql_stored_procedure_params: :vartype sql_stored_procedure_params: list[~flow.models.StoredProcedureParameter] """ _attribute_map = { 'sql_table_name': {'key': 'sqlTableName', 'type': 'str'}, 'sql_query': {'key': 'sqlQuery', 'type': 'str'}, 'sql_stored_procedure_name': {'key': 'sqlStoredProcedureName', 'type': 'str'}, 'sql_stored_procedure_params': {'key': 'sqlStoredProcedureParams', 'type': '[StoredProcedureParameter]'}, } def __init__( self, **kwargs ): """ :keyword sql_table_name: :paramtype sql_table_name: str :keyword sql_query: :paramtype sql_query: str :keyword sql_stored_procedure_name: :paramtype sql_stored_procedure_name: str :keyword sql_stored_procedure_params: :paramtype sql_stored_procedure_params: list[~flow.models.StoredProcedureParameter] """ super(SqlDataPath, self).__init__(**kwargs) self.sql_table_name = kwargs.get('sql_table_name', None) self.sql_query = kwargs.get('sql_query', None) self.sql_stored_procedure_name = kwargs.get('sql_stored_procedure_name', None) self.sql_stored_procedure_params = kwargs.get('sql_stored_procedure_params', None) class StackEnsembleSettings(msrest.serialization.Model): """StackEnsembleSettings. :ivar stack_meta_learner_type: Possible values include: "None", "LogisticRegression", "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV", "LightGBMRegressor", "LinearRegression". :vartype stack_meta_learner_type: str or ~flow.models.StackMetaLearnerType :ivar stack_meta_learner_train_percentage: :vartype stack_meta_learner_train_percentage: float :ivar stack_meta_learner_k_wargs: Anything. 
:vartype stack_meta_learner_k_wargs: any """ _attribute_map = { 'stack_meta_learner_type': {'key': 'stackMetaLearnerType', 'type': 'str'}, 'stack_meta_learner_train_percentage': {'key': 'stackMetaLearnerTrainPercentage', 'type': 'float'}, 'stack_meta_learner_k_wargs': {'key': 'stackMetaLearnerKWargs', 'type': 'object'}, } def __init__( self, **kwargs ): """ :keyword stack_meta_learner_type: Possible values include: "None", "LogisticRegression", "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV", "LightGBMRegressor", "LinearRegression". :paramtype stack_meta_learner_type: str or ~flow.models.StackMetaLearnerType :keyword stack_meta_learner_train_percentage: :paramtype stack_meta_learner_train_percentage: float :keyword stack_meta_learner_k_wargs: Anything. :paramtype stack_meta_learner_k_wargs: any """ super(StackEnsembleSettings, self).__init__(**kwargs) self.stack_meta_learner_type = kwargs.get('stack_meta_learner_type', None) self.stack_meta_learner_train_percentage = kwargs.get('stack_meta_learner_train_percentage', None) self.stack_meta_learner_k_wargs = kwargs.get('stack_meta_learner_k_wargs', None) class StandbyPoolProperties(msrest.serialization.Model): """StandbyPoolProperties. 
:ivar name: :vartype name: str :ivar count: :vartype count: int :ivar vm_size: :vartype vm_size: str :ivar standby_available_instances: :vartype standby_available_instances: list[~flow.models.StandbyPoolResourceStatus] """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'count': {'key': 'count', 'type': 'int'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'standby_available_instances': {'key': 'standbyAvailableInstances', 'type': '[StandbyPoolResourceStatus]'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword count: :paramtype count: int :keyword vm_size: :paramtype vm_size: str :keyword standby_available_instances: :paramtype standby_available_instances: list[~flow.models.StandbyPoolResourceStatus] """ super(StandbyPoolProperties, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.count = kwargs.get('count', None) self.vm_size = kwargs.get('vm_size', None) self.standby_available_instances = kwargs.get('standby_available_instances', None) class StandbyPoolResourceStatus(msrest.serialization.Model): """StandbyPoolResourceStatus. :ivar status: :vartype status: str :ivar error: :vartype error: ~flow.models.CloudError """ _attribute_map = { 'status': {'key': 'status', 'type': 'str'}, 'error': {'key': 'error', 'type': 'CloudError'}, } def __init__( self, **kwargs ): """ :keyword status: :paramtype status: str :keyword error: :paramtype error: ~flow.models.CloudError """ super(StandbyPoolResourceStatus, self).__init__(**kwargs) self.status = kwargs.get('status', None) self.error = kwargs.get('error', None) class StartRunResult(msrest.serialization.Model): """StartRunResult. All required parameters must be populated in order to send to Azure. :ivar run_id: Required. :vartype run_id: str """ _validation = { 'run_id': {'required': True, 'min_length': 1}, } _attribute_map = { 'run_id': {'key': 'runId', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword run_id: Required. 
:paramtype run_id: str """ super(StartRunResult, self).__init__(**kwargs) self.run_id = kwargs['run_id'] class StepRunProfile(msrest.serialization.Model): """StepRunProfile. :ivar step_run_id: :vartype step_run_id: str :ivar step_run_number: :vartype step_run_number: int :ivar run_url: :vartype run_url: str :ivar compute_target: :vartype compute_target: str :ivar compute_target_url: :vartype compute_target_url: str :ivar node_id: :vartype node_id: str :ivar node_name: :vartype node_name: str :ivar step_name: :vartype step_name: str :ivar create_time: :vartype create_time: long :ivar start_time: :vartype start_time: long :ivar end_time: :vartype end_time: long :ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused", "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled". :vartype status: str or ~flow.models.RunStatus :ivar status_detail: :vartype status_detail: str :ivar is_reused: :vartype is_reused: bool :ivar reused_pipeline_run_id: :vartype reused_pipeline_run_id: str :ivar reused_step_run_id: :vartype reused_step_run_id: str :ivar tags: A set of tags. Dictionary of :code:`<string>`. 
:vartype tags: dict[str, str] :ivar status_timeline: :vartype status_timeline: list[~flow.models.RunStatusPeriod] """ _attribute_map = { 'step_run_id': {'key': 'stepRunId', 'type': 'str'}, 'step_run_number': {'key': 'stepRunNumber', 'type': 'int'}, 'run_url': {'key': 'runUrl', 'type': 'str'}, 'compute_target': {'key': 'computeTarget', 'type': 'str'}, 'compute_target_url': {'key': 'computeTargetUrl', 'type': 'str'}, 'node_id': {'key': 'nodeId', 'type': 'str'}, 'node_name': {'key': 'nodeName', 'type': 'str'}, 'step_name': {'key': 'stepName', 'type': 'str'}, 'create_time': {'key': 'createTime', 'type': 'long'}, 'start_time': {'key': 'startTime', 'type': 'long'}, 'end_time': {'key': 'endTime', 'type': 'long'}, 'status': {'key': 'status', 'type': 'str'}, 'status_detail': {'key': 'statusDetail', 'type': 'str'}, 'is_reused': {'key': 'isReused', 'type': 'bool'}, 'reused_pipeline_run_id': {'key': 'reusedPipelineRunId', 'type': 'str'}, 'reused_step_run_id': {'key': 'reusedStepRunId', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'status_timeline': {'key': 'statusTimeline', 'type': '[RunStatusPeriod]'}, } def __init__( self, **kwargs ): """ :keyword step_run_id: :paramtype step_run_id: str :keyword step_run_number: :paramtype step_run_number: int :keyword run_url: :paramtype run_url: str :keyword compute_target: :paramtype compute_target: str :keyword compute_target_url: :paramtype compute_target_url: str :keyword node_id: :paramtype node_id: str :keyword node_name: :paramtype node_name: str :keyword step_name: :paramtype step_name: str :keyword create_time: :paramtype create_time: long :keyword start_time: :paramtype start_time: long :keyword end_time: :paramtype end_time: long :keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused", "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled". 
:paramtype status: str or ~flow.models.RunStatus :keyword status_detail: :paramtype status_detail: str :keyword is_reused: :paramtype is_reused: bool :keyword reused_pipeline_run_id: :paramtype reused_pipeline_run_id: str :keyword reused_step_run_id: :paramtype reused_step_run_id: str :keyword tags: A set of tags. Dictionary of :code:`<string>`. :paramtype tags: dict[str, str] :keyword status_timeline: :paramtype status_timeline: list[~flow.models.RunStatusPeriod] """ super(StepRunProfile, self).__init__(**kwargs) self.step_run_id = kwargs.get('step_run_id', None) self.step_run_number = kwargs.get('step_run_number', None) self.run_url = kwargs.get('run_url', None) self.compute_target = kwargs.get('compute_target', None) self.compute_target_url = kwargs.get('compute_target_url', None) self.node_id = kwargs.get('node_id', None) self.node_name = kwargs.get('node_name', None) self.step_name = kwargs.get('step_name', None) self.create_time = kwargs.get('create_time', None) self.start_time = kwargs.get('start_time', None) self.end_time = kwargs.get('end_time', None) self.status = kwargs.get('status', None) self.status_detail = kwargs.get('status_detail', None) self.is_reused = kwargs.get('is_reused', None) self.reused_pipeline_run_id = kwargs.get('reused_pipeline_run_id', None) self.reused_step_run_id = kwargs.get('reused_step_run_id', None) self.tags = kwargs.get('tags', None) self.status_timeline = kwargs.get('status_timeline', None) class StorageInfo(msrest.serialization.Model): """StorageInfo. :ivar storage_auth_type: Possible values include: "MSI", "ConnectionString", "SAS". 
:vartype storage_auth_type: str or ~flow.models.StorageAuthType :ivar connection_string: :vartype connection_string: str :ivar sas_token: :vartype sas_token: str :ivar account_name: :vartype account_name: str """ _attribute_map = { 'storage_auth_type': {'key': 'storageAuthType', 'type': 'str'}, 'connection_string': {'key': 'connectionString', 'type': 'str'}, 'sas_token': {'key': 'sasToken', 'type': 'str'}, 'account_name': {'key': 'accountName', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword storage_auth_type: Possible values include: "MSI", "ConnectionString", "SAS". :paramtype storage_auth_type: str or ~flow.models.StorageAuthType :keyword connection_string: :paramtype connection_string: str :keyword sas_token: :paramtype sas_token: str :keyword account_name: :paramtype account_name: str """ super(StorageInfo, self).__init__(**kwargs) self.storage_auth_type = kwargs.get('storage_auth_type', None) self.connection_string = kwargs.get('connection_string', None) self.sas_token = kwargs.get('sas_token', None) self.account_name = kwargs.get('account_name', None) class StoredProcedureParameter(msrest.serialization.Model): """StoredProcedureParameter. :ivar name: :vartype name: str :ivar value: :vartype value: str :ivar type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date". :vartype type: str or ~flow.models.StoredProcedureParameterType """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword value: :paramtype value: str :keyword type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date". 
:paramtype type: str or ~flow.models.StoredProcedureParameterType """ super(StoredProcedureParameter, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.value = kwargs.get('value', None) self.type = kwargs.get('type', None) class Stream(msrest.serialization.Model): """Stream. Variables are only populated by the server, and will be ignored when sending a request. :ivar can_read: :vartype can_read: bool :ivar can_write: :vartype can_write: bool :ivar can_seek: :vartype can_seek: bool :ivar can_timeout: :vartype can_timeout: bool :ivar length: :vartype length: long :ivar position: :vartype position: long :ivar read_timeout: :vartype read_timeout: int :ivar write_timeout: :vartype write_timeout: int """ _validation = { 'can_read': {'readonly': True}, 'can_write': {'readonly': True}, 'can_seek': {'readonly': True}, 'can_timeout': {'readonly': True}, 'length': {'readonly': True}, } _attribute_map = { 'can_read': {'key': 'canRead', 'type': 'bool'}, 'can_write': {'key': 'canWrite', 'type': 'bool'}, 'can_seek': {'key': 'canSeek', 'type': 'bool'}, 'can_timeout': {'key': 'canTimeout', 'type': 'bool'}, 'length': {'key': 'length', 'type': 'long'}, 'position': {'key': 'position', 'type': 'long'}, 'read_timeout': {'key': 'readTimeout', 'type': 'int'}, 'write_timeout': {'key': 'writeTimeout', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword position: :paramtype position: long :keyword read_timeout: :paramtype read_timeout: int :keyword write_timeout: :paramtype write_timeout: int """ super(Stream, self).__init__(**kwargs) self.can_read = None self.can_write = None self.can_seek = None self.can_timeout = None self.length = None self.position = kwargs.get('position', None) self.read_timeout = kwargs.get('read_timeout', None) self.write_timeout = kwargs.get('write_timeout', None) class StructuredInterface(msrest.serialization.Model): """StructuredInterface. 
    :ivar command_line_pattern:
    :vartype command_line_pattern: str
    :ivar inputs:
    :vartype inputs: list[~flow.models.StructuredInterfaceInput]
    :ivar outputs:
    :vartype outputs: list[~flow.models.StructuredInterfaceOutput]
    :ivar control_outputs:
    :vartype control_outputs: list[~flow.models.ControlOutput]
    :ivar parameters:
    :vartype parameters: list[~flow.models.StructuredInterfaceParameter]
    :ivar metadata_parameters:
    :vartype metadata_parameters: list[~flow.models.StructuredInterfaceParameter]
    :ivar arguments:
    :vartype arguments: list[~flow.models.ArgumentAssignment]
    """

    _attribute_map = {
        'command_line_pattern': {'key': 'commandLinePattern', 'type': 'str'},
        'inputs': {'key': 'inputs', 'type': '[StructuredInterfaceInput]'},
        'outputs': {'key': 'outputs', 'type': '[StructuredInterfaceOutput]'},
        'control_outputs': {'key': 'controlOutputs', 'type': '[ControlOutput]'},
        'parameters': {'key': 'parameters', 'type': '[StructuredInterfaceParameter]'},
        'metadata_parameters': {'key': 'metadataParameters', 'type': '[StructuredInterfaceParameter]'},
        'arguments': {'key': 'arguments', 'type': '[ArgumentAssignment]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword command_line_pattern:
        :paramtype command_line_pattern: str
        :keyword inputs:
        :paramtype inputs: list[~flow.models.StructuredInterfaceInput]
        :keyword outputs:
        :paramtype outputs: list[~flow.models.StructuredInterfaceOutput]
        :keyword control_outputs:
        :paramtype control_outputs: list[~flow.models.ControlOutput]
        :keyword parameters:
        :paramtype parameters: list[~flow.models.StructuredInterfaceParameter]
        :keyword metadata_parameters:
        :paramtype metadata_parameters: list[~flow.models.StructuredInterfaceParameter]
        :keyword arguments:
        :paramtype arguments: list[~flow.models.ArgumentAssignment]
        """
        super(StructuredInterface, self).__init__(**kwargs)
        self.command_line_pattern = kwargs.get('command_line_pattern', None)
        self.inputs = kwargs.get('inputs', None)
        self.outputs = kwargs.get('outputs', None)
        self.control_outputs = kwargs.get('control_outputs', None)
        self.parameters = kwargs.get('parameters', None)
        self.metadata_parameters = kwargs.get('metadata_parameters', None)
        self.arguments = kwargs.get('arguments', None)


class StructuredInterfaceInput(msrest.serialization.Model):
    """StructuredInterfaceInput.

    :ivar name:
    :vartype name: str
    :ivar label:
    :vartype label: str
    :ivar data_type_ids_list:
    :vartype data_type_ids_list: list[str]
    :ivar is_optional:
    :vartype is_optional: bool
    :ivar description:
    :vartype description: str
    :ivar skip_processing:
    :vartype skip_processing: bool
    :ivar is_resource:
    :vartype is_resource: bool
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar overwrite:
    :vartype overwrite: bool
    :ivar data_reference_name:
    :vartype data_reference_name: str
    :ivar dataset_types:
    :vartype dataset_types: list[str or ~flow.models.DatasetType]
    """

    # dataset_types entries must be unique on the wire.
    _validation = {
        'dataset_types': {'unique': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'data_type_ids_list': {'key': 'dataTypeIdsList', 'type': '[str]'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
        'description': {'key': 'description', 'type': 'str'},
        'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
        'is_resource': {'key': 'isResource', 'type': 'bool'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'overwrite': {'key': 'overwrite', 'type': 'bool'},
        'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
        'dataset_types': {'key': 'datasetTypes', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword label:
        :paramtype label: str
        :keyword data_type_ids_list:
        :paramtype data_type_ids_list: list[str]
        :keyword is_optional:
        :paramtype is_optional: bool
        :keyword description:
        :paramtype description: str
        :keyword skip_processing:
        :paramtype skip_processing: bool
        :keyword is_resource:
        :paramtype is_resource: bool
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword overwrite:
        :paramtype overwrite: bool
        :keyword data_reference_name:
        :paramtype data_reference_name: str
        :keyword dataset_types:
        :paramtype dataset_types: list[str or ~flow.models.DatasetType]
        """
        super(StructuredInterfaceInput, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.label = kwargs.get('label', None)
        self.data_type_ids_list = kwargs.get('data_type_ids_list', None)
        self.is_optional = kwargs.get('is_optional', None)
        self.description = kwargs.get('description', None)
        self.skip_processing = kwargs.get('skip_processing', None)
        self.is_resource = kwargs.get('is_resource', None)
        self.data_store_mode = kwargs.get('data_store_mode', None)
        self.path_on_compute = kwargs.get('path_on_compute', None)
        self.overwrite = kwargs.get('overwrite', None)
        self.data_reference_name = kwargs.get('data_reference_name', None)
        self.dataset_types = kwargs.get('dataset_types', None)


class StructuredInterfaceOutput(msrest.serialization.Model):
    """StructuredInterfaceOutput.

    :ivar name:
    :vartype name: str
    :ivar label:
    :vartype label: str
    :ivar data_type_id:
    :vartype data_type_id: str
    :ivar pass_through_data_type_input_name:
    :vartype pass_through_data_type_input_name: str
    :ivar description:
    :vartype description: str
    :ivar skip_processing:
    :vartype skip_processing: bool
    :ivar is_artifact:
    :vartype is_artifact: bool
    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar overwrite:
    :vartype overwrite: bool
    :ivar data_reference_name:
    :vartype data_reference_name: str
    :ivar training_output:
    :vartype training_output: ~flow.models.TrainingOutput
    :ivar dataset_output:
    :vartype dataset_output: ~flow.models.DatasetOutput
    :ivar asset_output_settings:
    :vartype asset_output_settings: ~flow.models.AssetOutputSettings
    :ivar early_available:
    :vartype early_available: bool
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
        'pass_through_data_type_input_name': {'key': 'passThroughDataTypeInputName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
        # NOTE: the wire contract really uses Pascal-cased keys for the three
        # entries below (unlike the camelCase used elsewhere) -- do not "fix".
        'is_artifact': {'key': 'IsArtifact', 'type': 'bool'},
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'overwrite': {'key': 'overwrite', 'type': 'bool'},
        'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
        'training_output': {'key': 'trainingOutput', 'type': 'TrainingOutput'},
        'dataset_output': {'key': 'datasetOutput', 'type': 'DatasetOutput'},
        'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AssetOutputSettings'},
        'early_available': {'key': 'EarlyAvailable', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword label:
        :paramtype label: str
        :keyword data_type_id:
        :paramtype data_type_id: str
        :keyword pass_through_data_type_input_name:
        :paramtype pass_through_data_type_input_name: str
        :keyword description:
        :paramtype description: str
        :keyword skip_processing:
        :paramtype skip_processing: bool
        :keyword is_artifact:
        :paramtype is_artifact: bool
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword overwrite:
        :paramtype overwrite: bool
        :keyword data_reference_name:
        :paramtype data_reference_name: str
        :keyword training_output:
        :paramtype training_output: ~flow.models.TrainingOutput
        :keyword dataset_output:
        :paramtype dataset_output: ~flow.models.DatasetOutput
        :keyword asset_output_settings:
        :paramtype asset_output_settings: ~flow.models.AssetOutputSettings
        :keyword early_available:
        :paramtype early_available: bool
        """
        super(StructuredInterfaceOutput, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.label = kwargs.get('label', None)
        self.data_type_id = kwargs.get('data_type_id', None)
        self.pass_through_data_type_input_name = kwargs.get('pass_through_data_type_input_name', None)
        self.description = kwargs.get('description', None)
        self.skip_processing = kwargs.get('skip_processing', None)
        self.is_artifact = kwargs.get('is_artifact', None)
        self.data_store_name = kwargs.get('data_store_name', None)
        self.data_store_mode = kwargs.get('data_store_mode', None)
        self.path_on_compute = kwargs.get('path_on_compute', None)
        self.overwrite = kwargs.get('overwrite', None)
        self.data_reference_name = kwargs.get('data_reference_name', None)
        self.training_output = kwargs.get('training_output', None)
        self.dataset_output = kwargs.get('dataset_output', None)
        self.asset_output_settings = kwargs.get('asset_output_settings', None)
        self.early_available = kwargs.get('early_available', None)


class StructuredInterfaceParameter(msrest.serialization.Model):
    """StructuredInterfaceParameter.

    :ivar name:
    :vartype name: str
    :ivar label:
    :vartype label: str
    :ivar parameter_type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
    :vartype parameter_type: str or ~flow.models.ParameterType
    :ivar is_optional:
    :vartype is_optional: bool
    :ivar default_value:
    :vartype default_value: str
    :ivar lower_bound:
    :vartype lower_bound: str
    :ivar upper_bound:
    :vartype upper_bound: str
    :ivar enum_values:
    :vartype enum_values: list[str]
    :ivar enum_values_to_argument_strings: This is a dictionary.
    :vartype enum_values_to_argument_strings: dict[str, str]
    :ivar description:
    :vartype description: str
    :ivar set_environment_variable:
    :vartype set_environment_variable: bool
    :ivar environment_variable_override:
    :vartype environment_variable_override: str
    :ivar enabled_by_parameter_name:
    :vartype enabled_by_parameter_name: str
    :ivar enabled_by_parameter_values:
    :vartype enabled_by_parameter_values: list[str]
    :ivar ui_hint:
    :vartype ui_hint: ~flow.models.UIParameterHint
    :ivar group_names:
    :vartype group_names: list[str]
    :ivar argument_name:
    :vartype argument_name: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'parameter_type': {'key': 'parameterType', 'type': 'str'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
        'default_value': {'key': 'defaultValue', 'type': 'str'},
        'lower_bound': {'key': 'lowerBound', 'type': 'str'},
        'upper_bound': {'key': 'upperBound', 'type': 'str'},
        'enum_values': {'key': 'enumValues', 'type': '[str]'},
        'enum_values_to_argument_strings': {'key': 'enumValuesToArgumentStrings', 'type': '{str}'},
        'description': {'key': 'description', 'type': 'str'},
        'set_environment_variable': {'key': 'setEnvironmentVariable', 'type': 'bool'},
        'environment_variable_override': {'key': 'environmentVariableOverride', 'type': 'str'},
        'enabled_by_parameter_name': {'key': 'enabledByParameterName', 'type': 'str'},
        'enabled_by_parameter_values': {'key': 'enabledByParameterValues', 'type': '[str]'},
        'ui_hint': {'key': 'uiHint', 'type': 'UIParameterHint'},
        'group_names': {'key': 'groupNames', 'type': '[str]'},
        'argument_name': {'key': 'argumentName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword label:
        :paramtype label: str
        :keyword parameter_type: Possible values include: "Int", "Double", "Bool", "String",
         "Undefined".
        :paramtype parameter_type: str or ~flow.models.ParameterType
        :keyword is_optional:
        :paramtype is_optional: bool
        :keyword default_value:
        :paramtype default_value: str
        :keyword lower_bound:
        :paramtype lower_bound: str
        :keyword upper_bound:
        :paramtype upper_bound: str
        :keyword enum_values:
        :paramtype enum_values: list[str]
        :keyword enum_values_to_argument_strings: This is a dictionary.
        :paramtype enum_values_to_argument_strings: dict[str, str]
        :keyword description:
        :paramtype description: str
        :keyword set_environment_variable:
        :paramtype set_environment_variable: bool
        :keyword environment_variable_override:
        :paramtype environment_variable_override: str
        :keyword enabled_by_parameter_name:
        :paramtype enabled_by_parameter_name: str
        :keyword enabled_by_parameter_values:
        :paramtype enabled_by_parameter_values: list[str]
        :keyword ui_hint:
        :paramtype ui_hint: ~flow.models.UIParameterHint
        :keyword group_names:
        :paramtype group_names: list[str]
        :keyword argument_name:
        :paramtype argument_name: str
        """
        super(StructuredInterfaceParameter, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.label = kwargs.get('label', None)
        self.parameter_type = kwargs.get('parameter_type', None)
        self.is_optional = kwargs.get('is_optional', None)
        self.default_value = kwargs.get('default_value', None)
        self.lower_bound = kwargs.get('lower_bound', None)
        self.upper_bound = kwargs.get('upper_bound', None)
        self.enum_values = kwargs.get('enum_values', None)
        self.enum_values_to_argument_strings = kwargs.get('enum_values_to_argument_strings', None)
        self.description = kwargs.get('description', None)
        self.set_environment_variable = kwargs.get('set_environment_variable', None)
        self.environment_variable_override = kwargs.get('environment_variable_override', None)
        self.enabled_by_parameter_name = kwargs.get('enabled_by_parameter_name', None)
        self.enabled_by_parameter_values = kwargs.get('enabled_by_parameter_values', None)
        self.ui_hint = kwargs.get('ui_hint', None)
        self.group_names = kwargs.get('group_names', None)
        self.argument_name = kwargs.get('argument_name', None)


class StudioMigrationInfo(msrest.serialization.Model):
    """StudioMigrationInfo.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar source_workspace_id:
    :vartype source_workspace_id: str
    :ivar source_experiment_id:
    :vartype source_experiment_id: str
    :ivar source_experiment_link:
    :vartype source_experiment_link: str
    :ivar failed_node_id_list:
    :vartype failed_node_id_list: list[str]
    :ivar error_message:
    :vartype error_message: str
    """

    _validation = {
        'error_message': {'readonly': True},
    }

    _attribute_map = {
        'source_workspace_id': {'key': 'sourceWorkspaceId', 'type': 'str'},
        'source_experiment_id': {'key': 'sourceExperimentId', 'type': 'str'},
        'source_experiment_link': {'key': 'sourceExperimentLink', 'type': 'str'},
        'failed_node_id_list': {'key': 'failedNodeIdList', 'type': '[str]'},
        'error_message': {'key': 'errorMessage', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword source_workspace_id:
        :paramtype source_workspace_id: str
        :keyword source_experiment_id:
        :paramtype source_experiment_id: str
        :keyword source_experiment_link:
        :paramtype source_experiment_link: str
        :keyword failed_node_id_list:
        :paramtype failed_node_id_list: list[str]
        """
        super(StudioMigrationInfo, self).__init__(**kwargs)
        self.source_workspace_id = kwargs.get('source_workspace_id', None)
        self.source_experiment_id = kwargs.get('source_experiment_id', None)
        self.source_experiment_link = kwargs.get('source_experiment_link', None)
        self.failed_node_id_list = kwargs.get('failed_node_id_list', None)
        # readonly: server-populated, never taken from kwargs.
        self.error_message = None


class SubGraphConcatenateAssignment(msrest.serialization.Model):
    """SubGraphConcatenateAssignment.

    :ivar concatenate_parameter:
    :vartype concatenate_parameter: list[~flow.models.ParameterAssignment]
    :ivar parameter_assignments:
    :vartype parameter_assignments: ~flow.models.SubPipelineParameterAssignment
    """

    _attribute_map = {
        'concatenate_parameter': {'key': 'concatenateParameter', 'type': '[ParameterAssignment]'},
        # NOTE: a single SubPipelineParameterAssignment here, not a list --
        # this matches the service contract.
        'parameter_assignments': {'key': 'parameterAssignments', 'type': 'SubPipelineParameterAssignment'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword concatenate_parameter:
        :paramtype concatenate_parameter: list[~flow.models.ParameterAssignment]
        :keyword parameter_assignments:
        :paramtype parameter_assignments: ~flow.models.SubPipelineParameterAssignment
        """
        super(SubGraphConcatenateAssignment, self).__init__(**kwargs)
        self.concatenate_parameter = kwargs.get('concatenate_parameter', None)
        self.parameter_assignments = kwargs.get('parameter_assignments', None)


class SubGraphConfiguration(msrest.serialization.Model):
    """SubGraphConfiguration.

    :ivar graph_id:
    :vartype graph_id: str
    :ivar graph_draft_id:
    :vartype graph_draft_id: str
    :ivar default_cloud_priority:
    :vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
    :ivar is_dynamic:
    :vartype is_dynamic: bool
    """

    _attribute_map = {
        'graph_id': {'key': 'graphId', 'type': 'str'},
        'graph_draft_id': {'key': 'graphDraftId', 'type': 'str'},
        # NOTE: Pascal-cased wire keys below come from the service contract.
        'default_cloud_priority': {'key': 'DefaultCloudPriority', 'type': 'CloudPrioritySetting'},
        'is_dynamic': {'key': 'IsDynamic', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword graph_id:
        :paramtype graph_id: str
        :keyword graph_draft_id:
        :paramtype graph_draft_id: str
        :keyword default_cloud_priority:
        :paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
        :keyword is_dynamic:
        :paramtype is_dynamic: bool
        """
        super(SubGraphConfiguration, self).__init__(**kwargs)
        self.graph_id = kwargs.get('graph_id', None)
        self.graph_draft_id = kwargs.get('graph_draft_id', None)
        self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
        # Unlike the other fields, is_dynamic defaults to False (not None) --
        # generated from the spec's declared default.
        self.is_dynamic = kwargs.get('is_dynamic', False)


class SubGraphConnectionInfo(msrest.serialization.Model):
    """SubGraphConnectionInfo.

    :ivar node_id:
    :vartype node_id: str
    :ivar port_name:
    :vartype port_name: str
    """

    _attribute_map = {
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'port_name': {'key': 'portName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword node_id:
        :paramtype node_id: str
        :keyword port_name:
        :paramtype port_name: str
        """
        super(SubGraphConnectionInfo, self).__init__(**kwargs)
        self.node_id = kwargs.get('node_id', None)
        self.port_name = kwargs.get('port_name', None)


class SubGraphDataPathParameterAssignment(msrest.serialization.Model):
    """SubGraphDataPathParameterAssignment.

    :ivar data_set_path_parameter:
    :vartype data_set_path_parameter: ~flow.models.DataSetPathParameter
    :ivar data_set_path_parameter_assignments:
    :vartype data_set_path_parameter_assignments: list[str]
    """

    _attribute_map = {
        'data_set_path_parameter': {'key': 'dataSetPathParameter', 'type': 'DataSetPathParameter'},
        'data_set_path_parameter_assignments': {'key': 'dataSetPathParameterAssignments', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword data_set_path_parameter:
        :paramtype data_set_path_parameter: ~flow.models.DataSetPathParameter
        :keyword data_set_path_parameter_assignments:
        :paramtype data_set_path_parameter_assignments: list[str]
        """
        super(SubGraphDataPathParameterAssignment, self).__init__(**kwargs)
        self.data_set_path_parameter = kwargs.get('data_set_path_parameter', None)
        self.data_set_path_parameter_assignments = kwargs.get('data_set_path_parameter_assignments', None)


class SubGraphInfo(msrest.serialization.Model):
    """SubGraphInfo.
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar default_compute_target:
    :vartype default_compute_target: ~flow.models.ComputeSetting
    :ivar default_data_store:
    :vartype default_data_store: ~flow.models.DatastoreSetting
    :ivar id:
    :vartype id: str
    :ivar parent_graph_id:
    :vartype parent_graph_id: str
    :ivar pipeline_definition_id:
    :vartype pipeline_definition_id: str
    :ivar sub_graph_parameter_assignment:
    :vartype sub_graph_parameter_assignment: list[~flow.models.SubGraphParameterAssignment]
    :ivar sub_graph_concatenate_assignment:
    :vartype sub_graph_concatenate_assignment: list[~flow.models.SubGraphConcatenateAssignment]
    :ivar sub_graph_data_path_parameter_assignment:
    :vartype sub_graph_data_path_parameter_assignment:
     list[~flow.models.SubGraphDataPathParameterAssignment]
    :ivar sub_graph_default_compute_target_nodes:
    :vartype sub_graph_default_compute_target_nodes: list[str]
    :ivar sub_graph_default_data_store_nodes:
    :vartype sub_graph_default_data_store_nodes: list[str]
    :ivar inputs:
    :vartype inputs: list[~flow.models.SubGraphPortInfo]
    :ivar outputs:
    :vartype outputs: list[~flow.models.SubGraphPortInfo]
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'default_compute_target': {'key': 'defaultComputeTarget', 'type': 'ComputeSetting'},
        'default_data_store': {'key': 'defaultDataStore', 'type': 'DatastoreSetting'},
        'id': {'key': 'id', 'type': 'str'},
        'parent_graph_id': {'key': 'parentGraphId', 'type': 'str'},
        'pipeline_definition_id': {'key': 'pipelineDefinitionId', 'type': 'str'},
        'sub_graph_parameter_assignment': {'key': 'subGraphParameterAssignment', 'type': '[SubGraphParameterAssignment]'},
        'sub_graph_concatenate_assignment': {'key': 'subGraphConcatenateAssignment', 'type': '[SubGraphConcatenateAssignment]'},
        'sub_graph_data_path_parameter_assignment': {'key': 'subGraphDataPathParameterAssignment', 'type': '[SubGraphDataPathParameterAssignment]'},
        'sub_graph_default_compute_target_nodes': {'key': 'subGraphDefaultComputeTargetNodes', 'type': '[str]'},
        'sub_graph_default_data_store_nodes': {'key': 'subGraphDefaultDataStoreNodes', 'type': '[str]'},
        'inputs': {'key': 'inputs', 'type': '[SubGraphPortInfo]'},
        'outputs': {'key': 'outputs', 'type': '[SubGraphPortInfo]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword default_compute_target:
        :paramtype default_compute_target: ~flow.models.ComputeSetting
        :keyword default_data_store:
        :paramtype default_data_store: ~flow.models.DatastoreSetting
        :keyword id:
        :paramtype id: str
        :keyword parent_graph_id:
        :paramtype parent_graph_id: str
        :keyword pipeline_definition_id:
        :paramtype pipeline_definition_id: str
        :keyword sub_graph_parameter_assignment:
        :paramtype sub_graph_parameter_assignment: list[~flow.models.SubGraphParameterAssignment]
        :keyword sub_graph_concatenate_assignment:
        :paramtype sub_graph_concatenate_assignment:
         list[~flow.models.SubGraphConcatenateAssignment]
        :keyword sub_graph_data_path_parameter_assignment:
        :paramtype sub_graph_data_path_parameter_assignment:
         list[~flow.models.SubGraphDataPathParameterAssignment]
        :keyword sub_graph_default_compute_target_nodes:
        :paramtype sub_graph_default_compute_target_nodes: list[str]
        :keyword sub_graph_default_data_store_nodes:
        :paramtype sub_graph_default_data_store_nodes: list[str]
        :keyword inputs:
        :paramtype inputs: list[~flow.models.SubGraphPortInfo]
        :keyword outputs:
        :paramtype outputs: list[~flow.models.SubGraphPortInfo]
        """
        super(SubGraphInfo, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.default_compute_target = kwargs.get('default_compute_target', None)
        self.default_data_store = kwargs.get('default_data_store', None)
        self.id = kwargs.get('id', None)
        self.parent_graph_id = kwargs.get('parent_graph_id', None)
        self.pipeline_definition_id = kwargs.get('pipeline_definition_id', None)
        self.sub_graph_parameter_assignment = kwargs.get('sub_graph_parameter_assignment', None)
        self.sub_graph_concatenate_assignment = kwargs.get('sub_graph_concatenate_assignment', None)
        self.sub_graph_data_path_parameter_assignment = kwargs.get('sub_graph_data_path_parameter_assignment', None)
        self.sub_graph_default_compute_target_nodes = kwargs.get('sub_graph_default_compute_target_nodes', None)
        self.sub_graph_default_data_store_nodes = kwargs.get('sub_graph_default_data_store_nodes', None)
        self.inputs = kwargs.get('inputs', None)
        self.outputs = kwargs.get('outputs', None)


class SubGraphParameterAssignment(msrest.serialization.Model):
    """SubGraphParameterAssignment.

    :ivar parameter:
    :vartype parameter: ~flow.models.Parameter
    :ivar parameter_assignments:
    :vartype parameter_assignments: list[~flow.models.SubPipelineParameterAssignment]
    """

    _attribute_map = {
        'parameter': {'key': 'parameter', 'type': 'Parameter'},
        'parameter_assignments': {'key': 'parameterAssignments', 'type': '[SubPipelineParameterAssignment]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword parameter:
        :paramtype parameter: ~flow.models.Parameter
        :keyword parameter_assignments:
        :paramtype parameter_assignments: list[~flow.models.SubPipelineParameterAssignment]
        """
        super(SubGraphParameterAssignment, self).__init__(**kwargs)
        self.parameter = kwargs.get('parameter', None)
        self.parameter_assignments = kwargs.get('parameter_assignments', None)


class SubGraphPortInfo(msrest.serialization.Model):
    """SubGraphPortInfo.
    :ivar name:
    :vartype name: str
    :ivar internal:
    :vartype internal: list[~flow.models.SubGraphConnectionInfo]
    :ivar external:
    :vartype external: list[~flow.models.SubGraphConnectionInfo]
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'internal': {'key': 'internal', 'type': '[SubGraphConnectionInfo]'},
        'external': {'key': 'external', 'type': '[SubGraphConnectionInfo]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword internal:
        :paramtype internal: list[~flow.models.SubGraphConnectionInfo]
        :keyword external:
        :paramtype external: list[~flow.models.SubGraphConnectionInfo]
        """
        super(SubGraphPortInfo, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.internal = kwargs.get('internal', None)
        self.external = kwargs.get('external', None)


class SubmitBulkRunRequest(msrest.serialization.Model):
    """SubmitBulkRunRequest.

    :ivar flow_definition_file_path:
    :vartype flow_definition_file_path: str
    :ivar flow_definition_resource_id:
    :vartype flow_definition_resource_id: str
    :ivar flow_definition_data_store_name:
    :vartype flow_definition_data_store_name: str
    :ivar flow_definition_blob_path:
    :vartype flow_definition_blob_path: str
    :ivar flow_definition_data_uri:
    :vartype flow_definition_data_uri: str
    :ivar run_id:
    :vartype run_id: str
    :ivar run_display_name:
    :vartype run_display_name: str
    :ivar run_experiment_name:
    :vartype run_experiment_name: str
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar node_variant:
    :vartype node_variant: str
    :ivar variant_run_id:
    :vartype variant_run_id: str
    :ivar baseline_run_id:
    :vartype baseline_run_id: str
    :ivar batch_data_input:
    :vartype batch_data_input: ~flow.models.BatchDataInput
    :ivar inputs_mapping: This is a dictionary.
    :vartype inputs_mapping: dict[str, str]
    :ivar connections: This is a dictionary.
    :vartype connections: dict[str, dict[str, str]]
    :ivar environment_variables: This is a dictionary.
    :vartype environment_variables: dict[str, str]
    :ivar aml_compute_name:
    :vartype aml_compute_name: str
    :ivar runtime_name:
    :vartype runtime_name: str
    :ivar session_id:
    :vartype session_id: str
    :ivar vm_size:
    :vartype vm_size: str
    :ivar max_idle_time_seconds:
    :vartype max_idle_time_seconds: long
    :ivar session_setup_mode: Possible values include: "ClientWait", "SystemWait".
    :vartype session_setup_mode: str or ~flow.models.SessionSetupModeEnum
    :ivar output_data_store:
    :vartype output_data_store: str
    :ivar flow_lineage_id:
    :vartype flow_lineage_id: str
    :ivar run_display_name_generation_type: Possible values include: "AutoAppend",
     "UserProvidedMacro".
    :vartype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
    """

    _attribute_map = {
        'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
        'flow_definition_resource_id': {'key': 'flowDefinitionResourceId', 'type': 'str'},
        'flow_definition_data_store_name': {'key': 'flowDefinitionDataStoreName', 'type': 'str'},
        'flow_definition_blob_path': {'key': 'flowDefinitionBlobPath', 'type': 'str'},
        'flow_definition_data_uri': {'key': 'flowDefinitionDataUri', 'type': 'str'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'run_display_name': {'key': 'runDisplayName', 'type': 'str'},
        'run_experiment_name': {'key': 'runExperimentName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'node_variant': {'key': 'nodeVariant', 'type': 'str'},
        'variant_run_id': {'key': 'variantRunId', 'type': 'str'},
        'baseline_run_id': {'key': 'baselineRunId', 'type': 'str'},
        'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
        'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
        # '{{str}}' is msrest's encoding for a dict of dicts of str.
        'connections': {'key': 'connections', 'type': '{{str}}'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
        'runtime_name': {'key': 'runtimeName', 'type': 'str'},
        'session_id': {'key': 'sessionId', 'type': 'str'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
        'session_setup_mode': {'key': 'sessionSetupMode', 'type': 'str'},
        'output_data_store': {'key': 'outputDataStore', 'type': 'str'},
        'flow_lineage_id': {'key': 'flowLineageId', 'type': 'str'},
        'run_display_name_generation_type': {'key': 'runDisplayNameGenerationType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword flow_definition_file_path:
        :paramtype flow_definition_file_path: str
        :keyword flow_definition_resource_id:
        :paramtype flow_definition_resource_id: str
        :keyword flow_definition_data_store_name:
        :paramtype flow_definition_data_store_name: str
        :keyword flow_definition_blob_path:
        :paramtype flow_definition_blob_path: str
        :keyword flow_definition_data_uri:
        :paramtype flow_definition_data_uri: str
        :keyword run_id:
        :paramtype run_id: str
        :keyword run_display_name:
        :paramtype run_display_name: str
        :keyword run_experiment_name:
        :paramtype run_experiment_name: str
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword node_variant:
        :paramtype node_variant: str
        :keyword variant_run_id:
        :paramtype variant_run_id: str
        :keyword baseline_run_id:
        :paramtype baseline_run_id: str
        :keyword batch_data_input:
        :paramtype batch_data_input: ~flow.models.BatchDataInput
        :keyword inputs_mapping: This is a dictionary.
        :paramtype inputs_mapping: dict[str, str]
        :keyword connections: This is a dictionary.
        :paramtype connections: dict[str, dict[str, str]]
        :keyword environment_variables: This is a dictionary.
        :paramtype environment_variables: dict[str, str]
        :keyword aml_compute_name:
        :paramtype aml_compute_name: str
        :keyword runtime_name:
        :paramtype runtime_name: str
        :keyword session_id:
        :paramtype session_id: str
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword max_idle_time_seconds:
        :paramtype max_idle_time_seconds: long
        :keyword session_setup_mode: Possible values include: "ClientWait", "SystemWait".
        :paramtype session_setup_mode: str or ~flow.models.SessionSetupModeEnum
        :keyword output_data_store:
        :paramtype output_data_store: str
        :keyword flow_lineage_id:
        :paramtype flow_lineage_id: str
        :keyword run_display_name_generation_type: Possible values include: "AutoAppend",
         "UserProvidedMacro".
        :paramtype run_display_name_generation_type: str or
         ~flow.models.RunDisplayNameGenerationType
        """
        super(SubmitBulkRunRequest, self).__init__(**kwargs)
        self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
        self.flow_definition_resource_id = kwargs.get('flow_definition_resource_id', None)
        self.flow_definition_data_store_name = kwargs.get('flow_definition_data_store_name', None)
        self.flow_definition_blob_path = kwargs.get('flow_definition_blob_path', None)
        self.flow_definition_data_uri = kwargs.get('flow_definition_data_uri', None)
        self.run_id = kwargs.get('run_id', None)
        self.run_display_name = kwargs.get('run_display_name', None)
        self.run_experiment_name = kwargs.get('run_experiment_name', None)
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)
        self.node_variant = kwargs.get('node_variant', None)
        self.variant_run_id = kwargs.get('variant_run_id', None)
        self.baseline_run_id = kwargs.get('baseline_run_id', None)
        self.batch_data_input = kwargs.get('batch_data_input', None)
        self.inputs_mapping = kwargs.get('inputs_mapping', None)
        self.connections = kwargs.get('connections', None)
        self.environment_variables = kwargs.get('environment_variables', None)
        self.aml_compute_name = kwargs.get('aml_compute_name', None)
        self.runtime_name = kwargs.get('runtime_name', None)
        self.session_id = kwargs.get('session_id', None)
        self.vm_size = kwargs.get('vm_size', None)
        self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
        self.session_setup_mode = kwargs.get('session_setup_mode', None)
        self.output_data_store = kwargs.get('output_data_store', None)
        self.flow_lineage_id = kwargs.get('flow_lineage_id', None)
        self.run_display_name_generation_type = kwargs.get('run_display_name_generation_type', None)


class SubmitBulkRunResponse(msrest.serialization.Model):
    """SubmitBulkRunResponse.

    :ivar next_action_interval_in_seconds:
    :vartype next_action_interval_in_seconds: int
    :ivar action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
     "SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
    :vartype action_type: str or ~flow.models.ActionType
    :ivar flow_runs:
    :vartype flow_runs: list[any]
    :ivar node_runs:
    :vartype node_runs: list[any]
    :ivar error_response: The error response.
    :vartype error_response: ~flow.models.ErrorResponse
    :ivar flow_name:
    :vartype flow_name: str
    :ivar flow_run_display_name:
    :vartype flow_run_display_name: str
    :ivar flow_run_id:
    :vartype flow_run_id: str
    :ivar flow_graph:
    :vartype flow_graph: ~flow.models.FlowGraph
    :ivar flow_graph_layout:
    :vartype flow_graph_layout: ~flow.models.FlowGraphLayout
    :ivar flow_run_resource_id:
    :vartype flow_run_resource_id: str
    :ivar bulk_test_id:
    :vartype bulk_test_id: str
    :ivar batch_inputs:
    :vartype batch_inputs: list[dict[str, any]]
    :ivar batch_data_input:
    :vartype batch_data_input: ~flow.models.BatchDataInput
    :ivar created_by:
    :vartype created_by: ~flow.models.SchemaContractsCreatedBy
    :ivar created_on:
    :vartype created_on: ~datetime.datetime
    :ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
     "PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
    :vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum
    :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
    :vartype flow_type: str or ~flow.models.FlowType
    :ivar runtime_name:
    :vartype runtime_name: str
    :ivar aml_compute_name:
    :vartype aml_compute_name: str
    :ivar flow_run_logs: Dictionary of :code:`<string>`.
    :vartype flow_run_logs: dict[str, str]
    :ivar flow_test_mode: Possible values include: "Sync", "Async".
    :vartype flow_test_mode: str or ~flow.models.FlowTestMode
    :ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
    :vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
    :ivar working_directory:
    :vartype working_directory: str
    :ivar flow_dag_file_relative_path:
    :vartype flow_dag_file_relative_path: str
    :ivar flow_snapshot_id:
    :vartype flow_snapshot_id: str
    :ivar variant_run_to_evaluation_runs_id_mapping: Dictionary of
     <components·1mlssi7·schemas·submitbulkrunresponse·properties·variantruntoevaluationrunsidmapping·additionalproperties>.
:vartype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]] """ _attribute_map = { 'next_action_interval_in_seconds': {'key': 'nextActionIntervalInSeconds', 'type': 'int'}, 'action_type': {'key': 'actionType', 'type': 'str'}, 'flow_runs': {'key': 'flow_runs', 'type': '[object]'}, 'node_runs': {'key': 'node_runs', 'type': '[object]'}, 'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'}, 'flow_name': {'key': 'flowName', 'type': 'str'}, 'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'}, 'flow_run_id': {'key': 'flowRunId', 'type': 'str'}, 'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'}, 'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'}, 'flow_run_resource_id': {'key': 'flowRunResourceId', 'type': 'str'}, 'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'}, 'batch_inputs': {'key': 'batchInputs', 'type': '[{object}]'}, 'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'}, 'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'}, 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'flow_run_type': {'key': 'flowRunType', 'type': 'str'}, 'flow_type': {'key': 'flowType', 'type': 'str'}, 'runtime_name': {'key': 'runtimeName', 'type': 'str'}, 'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'}, 'flow_run_logs': {'key': 'flowRunLogs', 'type': '{str}'}, 'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'}, 'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'}, 'working_directory': {'key': 'workingDirectory', 'type': 'str'}, 'flow_dag_file_relative_path': {'key': 'flowDagFileRelativePath', 'type': 'str'}, 'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'}, 'variant_run_to_evaluation_runs_id_mapping': {'key': 'variantRunToEvaluationRunsIdMapping', 'type': '{[str]}'}, } def __init__( self, **kwargs ): """ :keyword next_action_interval_in_seconds: :paramtype next_action_interval_in_seconds: int :keyword action_type: Possible 
values include: "SendValidationRequest", "GetValidationStatus", "SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent". :paramtype action_type: str or ~flow.models.ActionType :keyword flow_runs: :paramtype flow_runs: list[any] :keyword node_runs: :paramtype node_runs: list[any] :keyword error_response: The error response. :paramtype error_response: ~flow.models.ErrorResponse :keyword flow_name: :paramtype flow_name: str :keyword flow_run_display_name: :paramtype flow_run_display_name: str :keyword flow_run_id: :paramtype flow_run_id: str :keyword flow_graph: :paramtype flow_graph: ~flow.models.FlowGraph :keyword flow_graph_layout: :paramtype flow_graph_layout: ~flow.models.FlowGraphLayout :keyword flow_run_resource_id: :paramtype flow_run_resource_id: str :keyword bulk_test_id: :paramtype bulk_test_id: str :keyword batch_inputs: :paramtype batch_inputs: list[dict[str, any]] :keyword batch_data_input: :paramtype batch_data_input: ~flow.models.BatchDataInput :keyword created_by: :paramtype created_by: ~flow.models.SchemaContractsCreatedBy :keyword created_on: :paramtype created_on: ~datetime.datetime :keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun", "PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun". :paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :paramtype flow_type: str or ~flow.models.FlowType :keyword runtime_name: :paramtype runtime_name: str :keyword aml_compute_name: :paramtype aml_compute_name: str :keyword flow_run_logs: Dictionary of :code:`<string>`. :paramtype flow_run_logs: dict[str, str] :keyword flow_test_mode: Possible values include: "Sync", "Async". :paramtype flow_test_mode: str or ~flow.models.FlowTestMode :keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`. 
:paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo] :keyword working_directory: :paramtype working_directory: str :keyword flow_dag_file_relative_path: :paramtype flow_dag_file_relative_path: str :keyword flow_snapshot_id: :paramtype flow_snapshot_id: str :keyword variant_run_to_evaluation_runs_id_mapping: Dictionary of <componentsΒ·1mlssi7Β·schemasΒ·submitbulkrunresponseΒ·propertiesΒ·variantruntoevaluationrunsidmappingΒ·additionalproperties>. :paramtype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]] """ super(SubmitBulkRunResponse, self).__init__(**kwargs) self.next_action_interval_in_seconds = kwargs.get('next_action_interval_in_seconds', None) self.action_type = kwargs.get('action_type', None) self.flow_runs = kwargs.get('flow_runs', None) self.node_runs = kwargs.get('node_runs', None) self.error_response = kwargs.get('error_response', None) self.flow_name = kwargs.get('flow_name', None) self.flow_run_display_name = kwargs.get('flow_run_display_name', None) self.flow_run_id = kwargs.get('flow_run_id', None) self.flow_graph = kwargs.get('flow_graph', None) self.flow_graph_layout = kwargs.get('flow_graph_layout', None) self.flow_run_resource_id = kwargs.get('flow_run_resource_id', None) self.bulk_test_id = kwargs.get('bulk_test_id', None) self.batch_inputs = kwargs.get('batch_inputs', None) self.batch_data_input = kwargs.get('batch_data_input', None) self.created_by = kwargs.get('created_by', None) self.created_on = kwargs.get('created_on', None) self.flow_run_type = kwargs.get('flow_run_type', None) self.flow_type = kwargs.get('flow_type', None) self.runtime_name = kwargs.get('runtime_name', None) self.aml_compute_name = kwargs.get('aml_compute_name', None) self.flow_run_logs = kwargs.get('flow_run_logs', None) self.flow_test_mode = kwargs.get('flow_test_mode', None) self.flow_test_infos = kwargs.get('flow_test_infos', None) self.working_directory = kwargs.get('working_directory', None) self.flow_dag_file_relative_path = 
kwargs.get('flow_dag_file_relative_path', None)
        self.flow_snapshot_id = kwargs.get('flow_snapshot_id', None)
        self.variant_run_to_evaluation_runs_id_mapping = kwargs.get('variant_run_to_evaluation_runs_id_mapping', None)


# NOTE(review): AutoRest-generated request model (see file header) — do not
# hand-edit; regenerate from the service swagger instead.
class SubmitFlowRequest(msrest.serialization.Model):
    """SubmitFlowRequest.

    :ivar flow_run_id:
    :vartype flow_run_id: str
    :ivar flow_run_display_name:
    :vartype flow_run_display_name: str
    :ivar flow_id:
    :vartype flow_id: str
    :ivar flow:
    :vartype flow: ~flow.models.Flow
    :ivar flow_submit_run_settings:
    :vartype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
    :ivar async_submission:
    :vartype async_submission: bool
    :ivar use_workspace_connection:
    :vartype use_workspace_connection: bool
    :ivar use_flow_snapshot_to_submit:
    :vartype use_flow_snapshot_to_submit: bool
    :ivar enable_blob_run_artifacts:
    :vartype enable_blob_run_artifacts: bool
    :ivar enable_async_flow_test:
    :vartype enable_async_flow_test: bool
    :ivar flow_runtime_submission_api_version: Possible values include: "Version1", "Version2".
    :vartype flow_runtime_submission_api_version: str or
     ~flow.models.FlowRuntimeSubmissionApiVersion
    :ivar run_display_name_generation_type: Possible values include: "AutoAppend",
     "UserProvidedMacro".
    :vartype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
    """

    # msrest wire map: python attribute -> REST field name and serialization type.
    _attribute_map = {
        'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
        'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
        'flow_id': {'key': 'flowId', 'type': 'str'},
        'flow': {'key': 'flow', 'type': 'Flow'},
        'flow_submit_run_settings': {'key': 'flowSubmitRunSettings', 'type': 'FlowSubmitRunSettings'},
        'async_submission': {'key': 'asyncSubmission', 'type': 'bool'},
        'use_workspace_connection': {'key': 'useWorkspaceConnection', 'type': 'bool'},
        'use_flow_snapshot_to_submit': {'key': 'useFlowSnapshotToSubmit', 'type': 'bool'},
        'enable_blob_run_artifacts': {'key': 'enableBlobRunArtifacts', 'type': 'bool'},
        'enable_async_flow_test': {'key': 'enableAsyncFlowTest', 'type': 'bool'},
        'flow_runtime_submission_api_version': {'key': 'flowRuntimeSubmissionApiVersion', 'type': 'str'},
        'run_display_name_generation_type': {'key': 'runDisplayNameGenerationType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword flow_run_id:
        :paramtype flow_run_id: str
        :keyword flow_run_display_name:
        :paramtype flow_run_display_name: str
        :keyword flow_id:
        :paramtype flow_id: str
        :keyword flow:
        :paramtype flow: ~flow.models.Flow
        :keyword flow_submit_run_settings:
        :paramtype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
        :keyword async_submission:
        :paramtype async_submission: bool
        :keyword use_workspace_connection:
        :paramtype use_workspace_connection: bool
        :keyword use_flow_snapshot_to_submit:
        :paramtype use_flow_snapshot_to_submit: bool
        :keyword enable_blob_run_artifacts:
        :paramtype enable_blob_run_artifacts: bool
        :keyword enable_async_flow_test:
        :paramtype enable_async_flow_test: bool
        :keyword flow_runtime_submission_api_version: Possible values include: "Version1",
         "Version2".
        :paramtype flow_runtime_submission_api_version: str or
         ~flow.models.FlowRuntimeSubmissionApiVersion
        :keyword run_display_name_generation_type: Possible values include: "AutoAppend",
         "UserProvidedMacro".
        :paramtype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
        """
        super(SubmitFlowRequest, self).__init__(**kwargs)
        # All fields default to None when the keyword is absent; msrest omits
        # None-valued attributes from the serialized payload.
        self.flow_run_id = kwargs.get('flow_run_id', None)
        self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
        self.flow_id = kwargs.get('flow_id', None)
        self.flow = kwargs.get('flow', None)
        self.flow_submit_run_settings = kwargs.get('flow_submit_run_settings', None)
        self.async_submission = kwargs.get('async_submission', None)
        self.use_workspace_connection = kwargs.get('use_workspace_connection', None)
        self.use_flow_snapshot_to_submit = kwargs.get('use_flow_snapshot_to_submit', None)
        self.enable_blob_run_artifacts = kwargs.get('enable_blob_run_artifacts', None)
        self.enable_async_flow_test = kwargs.get('enable_async_flow_test', None)
        self.flow_runtime_submission_api_version = kwargs.get('flow_runtime_submission_api_version', None)
        self.run_display_name_generation_type = kwargs.get('run_display_name_generation_type', None)


class SubmitPipelineRunRequest(msrest.serialization.Model):
    """SubmitPipelineRunRequest.

    :ivar compute_target:
    :vartype compute_target: str
    :ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
    :vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
    :ivar step_tags: This is a dictionary.
    :vartype step_tags: dict[str, str]
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar pipeline_parameters: This is a dictionary.
    :vartype pipeline_parameters: dict[str, str]
    :ivar data_path_assignments: This is a dictionary.
    :vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
    :ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue] :ivar asset_output_settings_assignments: This is a dictionary. :vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings] :ivar enable_notification: :vartype enable_notification: bool :ivar sub_pipelines_info: :vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo :ivar display_name: :vartype display_name: str :ivar run_id: :vartype run_id: str :ivar parent_run_id: :vartype parent_run_id: str :ivar graph: :vartype graph: ~flow.models.GraphDraftEntity :ivar pipeline_run_settings: :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment] :ivar module_node_run_settings: :vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting] :ivar module_node_ui_input_settings: :vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting] :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar continue_run_on_step_failure: :vartype continue_run_on_step_failure: bool :ivar description: :vartype description: str :ivar properties: This is a dictionary. :vartype properties: dict[str, str] :ivar enforce_rerun: :vartype enforce_rerun: bool :ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset", "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset". 
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes """ _attribute_map = { 'compute_target': {'key': 'computeTarget', 'type': 'str'}, 'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'}, 'step_tags': {'key': 'stepTags', 'type': '{str}'}, 'experiment_name': {'key': 'experimentName', 'type': 'str'}, 'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'}, 'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'}, 'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'}, 'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'}, 'enable_notification': {'key': 'enableNotification', 'type': 'bool'}, 'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'run_id': {'key': 'runId', 'type': 'str'}, 'parent_run_id': {'key': 'parentRunId', 'type': 'str'}, 'graph': {'key': 'graph', 'type': 'GraphDraftEntity'}, 'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'}, 'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'}, 'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'}, 'description': {'key': 'description', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'}, 'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword compute_target: :paramtype compute_target: str :keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`. 
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft] :keyword step_tags: This is a dictionary. :paramtype step_tags: dict[str, str] :keyword experiment_name: :paramtype experiment_name: str :keyword pipeline_parameters: This is a dictionary. :paramtype pipeline_parameters: dict[str, str] :keyword data_path_assignments: This is a dictionary. :paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath] :keyword data_set_definition_value_assignments: This is a dictionary. :paramtype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue] :keyword asset_output_settings_assignments: This is a dictionary. :paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings] :keyword enable_notification: :paramtype enable_notification: bool :keyword sub_pipelines_info: :paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo :keyword display_name: :paramtype display_name: str :keyword run_id: :paramtype run_id: str :keyword parent_run_id: :paramtype parent_run_id: str :keyword graph: :paramtype graph: ~flow.models.GraphDraftEntity :keyword pipeline_run_settings: :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment] :keyword module_node_run_settings: :paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting] :keyword module_node_ui_input_settings: :paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting] :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword continue_run_on_step_failure: :paramtype continue_run_on_step_failure: bool :keyword description: :paramtype description: str :keyword properties: This is a dictionary. 
:paramtype properties: dict[str, str] :keyword enforce_rerun: :paramtype enforce_rerun: bool :keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset", "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset". :paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes """ super(SubmitPipelineRunRequest, self).__init__(**kwargs) self.compute_target = kwargs.get('compute_target', None) self.flattened_sub_graphs = kwargs.get('flattened_sub_graphs', None) self.step_tags = kwargs.get('step_tags', None) self.experiment_name = kwargs.get('experiment_name', None) self.pipeline_parameters = kwargs.get('pipeline_parameters', None) self.data_path_assignments = kwargs.get('data_path_assignments', None) self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None) self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None) self.enable_notification = kwargs.get('enable_notification', None) self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None) self.display_name = kwargs.get('display_name', None) self.run_id = kwargs.get('run_id', None) self.parent_run_id = kwargs.get('parent_run_id', None) self.graph = kwargs.get('graph', None) self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None) self.module_node_run_settings = kwargs.get('module_node_run_settings', None) self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None) self.tags = kwargs.get('tags', None) self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None) self.description = kwargs.get('description', None) self.properties = kwargs.get('properties', None) self.enforce_rerun = kwargs.get('enforce_rerun', None) self.dataset_access_modes = 
kwargs.get('dataset_access_modes', None) class SubPipelineDefinition(msrest.serialization.Model): """SubPipelineDefinition. :ivar name: :vartype name: str :ivar description: :vartype description: str :ivar default_compute_target: :vartype default_compute_target: ~flow.models.ComputeSetting :ivar default_data_store: :vartype default_data_store: ~flow.models.DatastoreSetting :ivar pipeline_function_name: :vartype pipeline_function_name: str :ivar id: :vartype id: str :ivar parent_definition_id: :vartype parent_definition_id: str :ivar from_module_name: :vartype from_module_name: str :ivar parameter_list: :vartype parameter_list: list[~flow.models.Kwarg] """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'default_compute_target': {'key': 'defaultComputeTarget', 'type': 'ComputeSetting'}, 'default_data_store': {'key': 'defaultDataStore', 'type': 'DatastoreSetting'}, 'pipeline_function_name': {'key': 'pipelineFunctionName', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'parent_definition_id': {'key': 'parentDefinitionId', 'type': 'str'}, 'from_module_name': {'key': 'fromModuleName', 'type': 'str'}, 'parameter_list': {'key': 'parameterList', 'type': '[Kwarg]'}, } def __init__( self, **kwargs ): """ :keyword name: :paramtype name: str :keyword description: :paramtype description: str :keyword default_compute_target: :paramtype default_compute_target: ~flow.models.ComputeSetting :keyword default_data_store: :paramtype default_data_store: ~flow.models.DatastoreSetting :keyword pipeline_function_name: :paramtype pipeline_function_name: str :keyword id: :paramtype id: str :keyword parent_definition_id: :paramtype parent_definition_id: str :keyword from_module_name: :paramtype from_module_name: str :keyword parameter_list: :paramtype parameter_list: list[~flow.models.Kwarg] """ super(SubPipelineDefinition, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.description = 
kwargs.get('description', None)
        self.default_compute_target = kwargs.get('default_compute_target', None)
        self.default_data_store = kwargs.get('default_data_store', None)
        self.pipeline_function_name = kwargs.get('pipeline_function_name', None)
        self.id = kwargs.get('id', None)
        self.parent_definition_id = kwargs.get('parent_definition_id', None)
        self.from_module_name = kwargs.get('from_module_name', None)
        self.parameter_list = kwargs.get('parameter_list', None)


# NOTE(review): AutoRest-generated model (see file header) — regenerate rather
# than hand-edit. Binds a sub-pipeline parameter to a graph node.
class SubPipelineParameterAssignment(msrest.serialization.Model):
    """SubPipelineParameterAssignment.

    :ivar node_id:
    :vartype node_id: str
    :ivar parameter_name:
    :vartype parameter_name: str
    """

    # msrest wire map: python attribute -> REST field name and serialization type.
    _attribute_map = {
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'parameter_name': {'key': 'parameterName', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword node_id:
        :paramtype node_id: str
        :keyword parameter_name:
        :paramtype parameter_name: str
        """
        super(SubPipelineParameterAssignment, self).__init__(**kwargs)
        self.node_id = kwargs.get('node_id', None)
        self.parameter_name = kwargs.get('parameter_name', None)


class SubPipelinesInfo(msrest.serialization.Model):
    """SubPipelinesInfo.

    :ivar sub_graph_info:
    :vartype sub_graph_info: list[~flow.models.SubGraphInfo]
    :ivar node_id_to_sub_graph_id_mapping: Dictionary of :code:`<string>`.
    :vartype node_id_to_sub_graph_id_mapping: dict[str, str]
    :ivar sub_pipeline_definition:
    :vartype sub_pipeline_definition: list[~flow.models.SubPipelineDefinition]
    """

    _attribute_map = {
        'sub_graph_info': {'key': 'subGraphInfo', 'type': '[SubGraphInfo]'},
        'node_id_to_sub_graph_id_mapping': {'key': 'nodeIdToSubGraphIdMapping', 'type': '{str}'},
        'sub_pipeline_definition': {'key': 'subPipelineDefinition', 'type': '[SubPipelineDefinition]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword sub_graph_info:
        :paramtype sub_graph_info: list[~flow.models.SubGraphInfo]
        :keyword node_id_to_sub_graph_id_mapping: Dictionary of :code:`<string>`.
:paramtype node_id_to_sub_graph_id_mapping: dict[str, str]
        :keyword sub_pipeline_definition:
        :paramtype sub_pipeline_definition: list[~flow.models.SubPipelineDefinition]
        """
        super(SubPipelinesInfo, self).__init__(**kwargs)
        self.sub_graph_info = kwargs.get('sub_graph_info', None)
        self.node_id_to_sub_graph_id_mapping = kwargs.get('node_id_to_sub_graph_id_mapping', None)
        self.sub_pipeline_definition = kwargs.get('sub_pipeline_definition', None)


# NOTE(review): AutoRest-generated model (see file header) — regenerate rather
# than hand-edit. Recursive: sub_periods holds nested SubStatusPeriod entries.
class SubStatusPeriod(msrest.serialization.Model):
    """SubStatusPeriod.

    :ivar name:
    :vartype name: str
    :ivar sub_periods:
    :vartype sub_periods: list[~flow.models.SubStatusPeriod]
    :ivar start:
    :vartype start: long
    :ivar end:
    :vartype end: long
    """

    # msrest wire map; 'long' is msrest's 64-bit integer wire type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'sub_periods': {'key': 'subPeriods', 'type': '[SubStatusPeriod]'},
        'start': {'key': 'start', 'type': 'long'},
        'end': {'key': 'end', 'type': 'long'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword sub_periods:
        :paramtype sub_periods: list[~flow.models.SubStatusPeriod]
        :keyword start:
        :paramtype start: long
        :keyword end:
        :paramtype end: long
        """
        super(SubStatusPeriod, self).__init__(**kwargs)
        # start/end are presumably epoch-based timestamps (long) — TODO confirm
        # against the service contract; not derivable from this file alone.
        self.name = kwargs.get('name', None)
        self.sub_periods = kwargs.get('sub_periods', None)
        self.start = kwargs.get('start', None)
        self.end = kwargs.get('end', None)


class SweepEarlyTerminationPolicy(msrest.serialization.Model):
    """SweepEarlyTerminationPolicy.

    :ivar policy_type: Possible values include: "Bandit", "MedianStopping",
     "TruncationSelection".
:vartype policy_type: str or ~flow.models.EarlyTerminationPolicyType :ivar evaluation_interval: :vartype evaluation_interval: int :ivar delay_evaluation: :vartype delay_evaluation: int :ivar slack_factor: :vartype slack_factor: float :ivar slack_amount: :vartype slack_amount: float :ivar truncation_percentage: :vartype truncation_percentage: int """ _attribute_map = { 'policy_type': {'key': 'policyType', 'type': 'str'}, 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, 'slack_factor': {'key': 'slackFactor', 'type': 'float'}, 'slack_amount': {'key': 'slackAmount', 'type': 'float'}, 'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword policy_type: Possible values include: "Bandit", "MedianStopping", "TruncationSelection". :paramtype policy_type: str or ~flow.models.EarlyTerminationPolicyType :keyword evaluation_interval: :paramtype evaluation_interval: int :keyword delay_evaluation: :paramtype delay_evaluation: int :keyword slack_factor: :paramtype slack_factor: float :keyword slack_amount: :paramtype slack_amount: float :keyword truncation_percentage: :paramtype truncation_percentage: int """ super(SweepEarlyTerminationPolicy, self).__init__(**kwargs) self.policy_type = kwargs.get('policy_type', None) self.evaluation_interval = kwargs.get('evaluation_interval', None) self.delay_evaluation = kwargs.get('delay_evaluation', None) self.slack_factor = kwargs.get('slack_factor', None) self.slack_amount = kwargs.get('slack_amount', None) self.truncation_percentage = kwargs.get('truncation_percentage', None) class SweepSettings(msrest.serialization.Model): """SweepSettings. :ivar limits: :vartype limits: ~flow.models.SweepSettingsLimits :ivar search_space: :vartype search_space: list[dict[str, str]] :ivar sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian". 
:vartype sampling_algorithm: str or ~flow.models.SamplingAlgorithmType
    :ivar early_termination:
    :vartype early_termination: ~flow.models.SweepEarlyTerminationPolicy
    """

    _attribute_map = {
        'limits': {'key': 'limits', 'type': 'SweepSettingsLimits'},
        'search_space': {'key': 'searchSpace', 'type': '[{str}]'},
        'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'},
        'early_termination': {'key': 'earlyTermination', 'type': 'SweepEarlyTerminationPolicy'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword limits:
        :paramtype limits: ~flow.models.SweepSettingsLimits
        :keyword search_space:
        :paramtype search_space: list[dict[str, str]]
        :keyword sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian".
        :paramtype sampling_algorithm: str or ~flow.models.SamplingAlgorithmType
        :keyword early_termination:
        :paramtype early_termination: ~flow.models.SweepEarlyTerminationPolicy
        """
        super(SweepSettings, self).__init__(**kwargs)
        self.limits = kwargs.get('limits', None)
        self.search_space = kwargs.get('search_space', None)
        self.sampling_algorithm = kwargs.get('sampling_algorithm', None)
        self.early_termination = kwargs.get('early_termination', None)


# NOTE(review): AutoRest-generated model (see file header) — regenerate rather
# than hand-edit. Trial-count bounds for a sweep job.
class SweepSettingsLimits(msrest.serialization.Model):
    """SweepSettingsLimits.

    :ivar max_total_trials:
    :vartype max_total_trials: int
    :ivar max_concurrent_trials:
    :vartype max_concurrent_trials: int
    """

    # msrest wire map: python attribute -> REST field name and serialization type.
    _attribute_map = {
        'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'},
        'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword max_total_trials:
        :paramtype max_total_trials: int
        :keyword max_concurrent_trials:
        :paramtype max_concurrent_trials: int
        """
        super(SweepSettingsLimits, self).__init__(**kwargs)
        self.max_total_trials = kwargs.get('max_total_trials', None)
        self.max_concurrent_trials = kwargs.get('max_concurrent_trials', None)


class SystemData(msrest.serialization.Model):
    """SystemData.
:ivar created_at:
    :vartype created_at: ~datetime.datetime
    :ivar created_by:
    :vartype created_by: str
    :ivar created_by_type: Possible values include: "User", "Application", "ManagedIdentity",
     "Key".
    :vartype created_by_type: str or ~flow.models.UserType
    :ivar last_modified_at:
    :vartype last_modified_at: ~datetime.datetime
    :ivar last_modified_by:
    :vartype last_modified_by: str
    :ivar last_modified_by_type: Possible values include: "User", "Application", "ManagedIdentity",
     "Key".
    :vartype last_modified_by_type: str or ~flow.models.UserType
    """

    # msrest wire map; 'iso-8601' (de)serializes datetime fields.
    _attribute_map = {
        'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
        'created_by': {'key': 'createdBy', 'type': 'str'},
        'created_by_type': {'key': 'createdByType', 'type': 'str'},
        'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
        'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
        'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword created_at:
        :paramtype created_at: ~datetime.datetime
        :keyword created_by:
        :paramtype created_by: str
        :keyword created_by_type: Possible values include: "User", "Application", "ManagedIdentity",
         "Key".
        :paramtype created_by_type: str or ~flow.models.UserType
        :keyword last_modified_at:
        :paramtype last_modified_at: ~datetime.datetime
        :keyword last_modified_by:
        :paramtype last_modified_by: str
        :keyword last_modified_by_type: Possible values include: "User", "Application",
         "ManagedIdentity", "Key".
        :paramtype last_modified_by_type: str or ~flow.models.UserType
        """
        super(SystemData, self).__init__(**kwargs)
        self.created_at = kwargs.get('created_at', None)
        self.created_by = kwargs.get('created_by', None)
        self.created_by_type = kwargs.get('created_by_type', None)
        self.last_modified_at = kwargs.get('last_modified_at', None)
        self.last_modified_by = kwargs.get('last_modified_by', None)
        self.last_modified_by_type = kwargs.get('last_modified_by_type', None)


# NOTE(review): AutoRest-generated model (see file header) — regenerate rather
# than hand-edit. Carries legacy single hashes plus newer grouped hash objects.
class SystemMeta(msrest.serialization.Model):
    """SystemMeta.

    :ivar identifier_hash:
    :vartype identifier_hash: str
    :ivar extra_hash:
    :vartype extra_hash: str
    :ivar content_hash:
    :vartype content_hash: str
    :ivar identifier_hashes:
    :vartype identifier_hashes: ~flow.models.SystemMetaIdentifierHashes
    :ivar extra_hashes:
    :vartype extra_hashes: ~flow.models.SystemMetaExtraHashes
    """

    # msrest wire map: python attribute -> REST field name and serialization type.
    _attribute_map = {
        'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
        'extra_hash': {'key': 'extraHash', 'type': 'str'},
        'content_hash': {'key': 'contentHash', 'type': 'str'},
        'identifier_hashes': {'key': 'identifierHashes', 'type': 'SystemMetaIdentifierHashes'},
        'extra_hashes': {'key': 'extraHashes', 'type': 'SystemMetaExtraHashes'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword identifier_hash:
        :paramtype identifier_hash: str
        :keyword extra_hash:
        :paramtype extra_hash: str
        :keyword content_hash:
        :paramtype content_hash: str
        :keyword identifier_hashes:
        :paramtype identifier_hashes: ~flow.models.SystemMetaIdentifierHashes
        :keyword extra_hashes:
        :paramtype extra_hashes: ~flow.models.SystemMetaExtraHashes
        """
        super(SystemMeta, self).__init__(**kwargs)
        self.identifier_hash = kwargs.get('identifier_hash', None)
        self.extra_hash = kwargs.get('extra_hash', None)
        self.content_hash = kwargs.get('content_hash', None)
        self.identifier_hashes = kwargs.get('identifier_hashes', None)
        self.extra_hashes = kwargs.get('extra_hashes', None)


class SystemMetaExtraHashes(msrest.serialization.Model):
    """SystemMetaExtraHashes.
:ivar identifier_hash: :vartype identifier_hash: str :ivar identifier_hash_v2: :vartype identifier_hash_v2: str """ _attribute_map = { 'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'}, 'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword identifier_hash: :paramtype identifier_hash: str :keyword identifier_hash_v2: :paramtype identifier_hash_v2: str """ super(SystemMetaExtraHashes, self).__init__(**kwargs) self.identifier_hash = kwargs.get('identifier_hash', None) self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None) class SystemMetaIdentifierHashes(msrest.serialization.Model): """SystemMetaIdentifierHashes. :ivar identifier_hash: :vartype identifier_hash: str :ivar identifier_hash_v2: :vartype identifier_hash_v2: str """ _attribute_map = { 'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'}, 'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword identifier_hash: :paramtype identifier_hash: str :keyword identifier_hash_v2: :paramtype identifier_hash_v2: str """ super(SystemMetaIdentifierHashes, self).__init__(**kwargs) self.identifier_hash = kwargs.get('identifier_hash', None) self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None) class TargetLags(msrest.serialization.Model): """TargetLags. :ivar mode: Possible values include: "Auto", "Custom". :vartype mode: str or ~flow.models.TargetLagsMode :ivar values: :vartype values: list[int] """ _attribute_map = { 'mode': {'key': 'mode', 'type': 'str'}, 'values': {'key': 'values', 'type': '[int]'}, } def __init__( self, **kwargs ): """ :keyword mode: Possible values include: "Auto", "Custom". 
:paramtype mode: str or ~flow.models.TargetLagsMode :keyword values: :paramtype values: list[int] """ super(TargetLags, self).__init__(**kwargs) self.mode = kwargs.get('mode', None) self.values = kwargs.get('values', None) class TargetRollingWindowSize(msrest.serialization.Model): """TargetRollingWindowSize. :ivar mode: Possible values include: "Auto", "Custom". :vartype mode: str or ~flow.models.TargetRollingWindowSizeMode :ivar value: :vartype value: int """ _attribute_map = { 'mode': {'key': 'mode', 'type': 'str'}, 'value': {'key': 'value', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword mode: Possible values include: "Auto", "Custom". :paramtype mode: str or ~flow.models.TargetRollingWindowSizeMode :keyword value: :paramtype value: int """ super(TargetRollingWindowSize, self).__init__(**kwargs) self.mode = kwargs.get('mode', None) self.value = kwargs.get('value', None) class TargetSelectorConfiguration(msrest.serialization.Model): """TargetSelectorConfiguration. :ivar low_priority_vm_tolerant: :vartype low_priority_vm_tolerant: bool :ivar cluster_block_list: :vartype cluster_block_list: list[str] :ivar compute_type: :vartype compute_type: str :ivar instance_type: :vartype instance_type: list[str] :ivar instance_types: :vartype instance_types: list[str] :ivar my_resource_only: :vartype my_resource_only: bool :ivar plan_id: :vartype plan_id: str :ivar plan_region_id: :vartype plan_region_id: str :ivar region: :vartype region: list[str] :ivar regions: :vartype regions: list[str] :ivar vc_block_list: :vartype vc_block_list: list[str] """ _attribute_map = { 'low_priority_vm_tolerant': {'key': 'lowPriorityVMTolerant', 'type': 'bool'}, 'cluster_block_list': {'key': 'clusterBlockList', 'type': '[str]'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'instance_type': {'key': 'instanceType', 'type': '[str]'}, 'instance_types': {'key': 'instanceTypes', 'type': '[str]'}, 'my_resource_only': {'key': 'myResourceOnly', 'type': 'bool'}, 'plan_id': 
        {'key': 'planId', 'type': 'str'},
        'plan_region_id': {'key': 'planRegionId', 'type': 'str'},
        'region': {'key': 'region', 'type': '[str]'},
        'regions': {'key': 'regions', 'type': '[str]'},
        'vc_block_list': {'key': 'vcBlockList', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword low_priority_vm_tolerant:
        :paramtype low_priority_vm_tolerant: bool
        :keyword cluster_block_list:
        :paramtype cluster_block_list: list[str]
        :keyword compute_type:
        :paramtype compute_type: str
        :keyword instance_type:
        :paramtype instance_type: list[str]
        :keyword instance_types:
        :paramtype instance_types: list[str]
        :keyword my_resource_only:
        :paramtype my_resource_only: bool
        :keyword plan_id:
        :paramtype plan_id: str
        :keyword plan_region_id:
        :paramtype plan_region_id: str
        :keyword region:
        :paramtype region: list[str]
        :keyword regions:
        :paramtype regions: list[str]
        :keyword vc_block_list:
        :paramtype vc_block_list: list[str]
        """
        super(TargetSelectorConfiguration, self).__init__(**kwargs)
        self.low_priority_vm_tolerant = kwargs.get('low_priority_vm_tolerant', None)
        self.cluster_block_list = kwargs.get('cluster_block_list', None)
        self.compute_type = kwargs.get('compute_type', None)
        self.instance_type = kwargs.get('instance_type', None)
        self.instance_types = kwargs.get('instance_types', None)
        self.my_resource_only = kwargs.get('my_resource_only', None)
        self.plan_id = kwargs.get('plan_id', None)
        self.plan_region_id = kwargs.get('plan_region_id', None)
        self.region = kwargs.get('region', None)
        self.regions = kwargs.get('regions', None)
        self.vc_block_list = kwargs.get('vc_block_list', None)


# Snapshot of an asynchronous task's state; most fields are populated by the
# service and are therefore read-only on the client.
class Task(msrest.serialization.Model):
    """Task.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id:
    :vartype id: int
    :ivar exception: Anything.
    :vartype exception: any
    :ivar status: Possible values include: "Created", "WaitingForActivation", "WaitingToRun",
     "Running", "WaitingForChildrenToComplete", "RanToCompletion", "Canceled", "Faulted".
    :vartype status: str or ~flow.models.TaskStatus
    :ivar is_canceled:
    :vartype is_canceled: bool
    :ivar is_completed:
    :vartype is_completed: bool
    :ivar is_completed_successfully:
    :vartype is_completed_successfully: bool
    :ivar creation_options: Possible values include: "None", "PreferFairness", "LongRunning",
     "AttachedToParent", "DenyChildAttach", "HideScheduler", "RunContinuationsAsynchronously".
    :vartype creation_options: str or ~flow.models.TaskCreationOptions
    :ivar async_state: Anything.
    :vartype async_state: any
    :ivar is_faulted:
    :vartype is_faulted: bool
    """

    # Fields marked readonly are server-populated; msrest skips them when
    # serializing a request body.
    _validation = {
        'id': {'readonly': True},
        'exception': {'readonly': True},
        'is_canceled': {'readonly': True},
        'is_completed': {'readonly': True},
        'is_completed_successfully': {'readonly': True},
        'async_state': {'readonly': True},
        'is_faulted': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'int'},
        'exception': {'key': 'exception', 'type': 'object'},
        'status': {'key': 'status', 'type': 'str'},
        'is_canceled': {'key': 'isCanceled', 'type': 'bool'},
        'is_completed': {'key': 'isCompleted', 'type': 'bool'},
        'is_completed_successfully': {'key': 'isCompletedSuccessfully', 'type': 'bool'},
        'creation_options': {'key': 'creationOptions', 'type': 'str'},
        'async_state': {'key': 'asyncState', 'type': 'object'},
        'is_faulted': {'key': 'isFaulted', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword status: Possible values include: "Created", "WaitingForActivation",
         "WaitingToRun", "Running", "WaitingForChildrenToComplete", "RanToCompletion", "Canceled",
         "Faulted".
        :paramtype status: str or ~flow.models.TaskStatus
        :keyword creation_options: Possible values include: "None", "PreferFairness",
         "LongRunning", "AttachedToParent", "DenyChildAttach", "HideScheduler",
         "RunContinuationsAsynchronously".
        :paramtype creation_options: str or ~flow.models.TaskCreationOptions
        """
        super(Task, self).__init__(**kwargs)
        # Read-only fields are initialized to None; the service fills them in.
        self.id = None
        self.exception = None
        self.status = kwargs.get('status', None)
        self.is_canceled = None
        self.is_completed = None
        self.is_completed_successfully = None
        self.creation_options = kwargs.get('creation_options', None)
        self.async_state = None
        self.is_faulted = None


# Per-iteration control-flow context (DoWhile / ParallelFor) for a task.
class TaskControlFlowInfo(msrest.serialization.Model):
    """TaskControlFlowInfo.

    :ivar control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
    :vartype control_flow_type: str or ~flow.models.ControlFlowType
    :ivar iteration_index:
    :vartype iteration_index: int
    :ivar item_name:
    :vartype item_name: str
    :ivar parameters_overwritten: Dictionary of :code:`<string>`.
    :vartype parameters_overwritten: dict[str, str]
    :ivar is_reused:
    :vartype is_reused: bool
    """

    _attribute_map = {
        'control_flow_type': {'key': 'controlFlowType', 'type': 'str'},
        'iteration_index': {'key': 'iterationIndex', 'type': 'int'},
        'item_name': {'key': 'itemName', 'type': 'str'},
        'parameters_overwritten': {'key': 'parametersOverwritten', 'type': '{str}'},
        'is_reused': {'key': 'isReused', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
        :paramtype control_flow_type: str or ~flow.models.ControlFlowType
        :keyword iteration_index:
        :paramtype iteration_index: int
        :keyword item_name:
        :paramtype item_name: str
        :keyword parameters_overwritten: Dictionary of :code:`<string>`.
        :paramtype parameters_overwritten: dict[str, str]
        :keyword is_reused:
        :paramtype is_reused: bool
        """
        super(TaskControlFlowInfo, self).__init__(**kwargs)
        self.control_flow_type = kwargs.get('control_flow_type', None)
        self.iteration_index = kwargs.get('iteration_index', None)
        self.item_name = kwargs.get('item_name', None)
        self.parameters_overwritten = kwargs.get('parameters_overwritten', None)
        self.is_reused = kwargs.get('is_reused', None)


# Provenance of a reused task result: which run/node originally produced it.
class TaskReuseInfo(msrest.serialization.Model):
    """TaskReuseInfo.

    :ivar experiment_id:
    :vartype experiment_id: str
    :ivar pipeline_run_id:
    :vartype pipeline_run_id: str
    :ivar node_id:
    :vartype node_id: str
    :ivar request_id:
    :vartype request_id: str
    :ivar run_id:
    :vartype run_id: str
    :ivar node_start_time:
    :vartype node_start_time: ~datetime.datetime
    :ivar node_end_time:
    :vartype node_end_time: ~datetime.datetime
    """

    _attribute_map = {
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'node_start_time': {'key': 'nodeStartTime', 'type': 'iso-8601'},
        'node_end_time': {'key': 'nodeEndTime', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword experiment_id:
        :paramtype experiment_id: str
        :keyword pipeline_run_id:
        :paramtype pipeline_run_id: str
        :keyword node_id:
        :paramtype node_id: str
        :keyword request_id:
        :paramtype request_id: str
        :keyword run_id:
        :paramtype run_id: str
        :keyword node_start_time:
        :paramtype node_start_time: ~datetime.datetime
        :keyword node_end_time:
        :paramtype node_end_time: ~datetime.datetime
        """
        super(TaskReuseInfo, self).__init__(**kwargs)
        self.experiment_id = kwargs.get('experiment_id', None)
        self.pipeline_run_id = kwargs.get('pipeline_run_id', None)
        self.node_id = kwargs.get('node_id', None)
        self.request_id = kwargs.get('request_id', None)
        self.run_id = kwargs.get('run_id', None)
        self.node_start_time = kwargs.get('node_start_time', None)
        self.node_end_time = kwargs.get('node_end_time', None)


class TensorflowConfiguration(msrest.serialization.Model):
    """TensorflowConfiguration.

    :ivar worker_count:
    :vartype worker_count: int
    :ivar parameter_server_count:
    :vartype parameter_server_count: int
    """

    _attribute_map = {
        'worker_count': {'key': 'workerCount', 'type': 'int'},
        'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword worker_count:
        :paramtype worker_count: int
        :keyword parameter_server_count:
        :paramtype parameter_server_count: int
        """
        super(TensorflowConfiguration, self).__init__(**kwargs)
        self.worker_count = kwargs.get('worker_count', None)
        self.parameter_server_count = kwargs.get('parameter_server_count', None)


class TestDataSettings(msrest.serialization.Model):
    """TestDataSettings.

    :ivar test_data_size:
    :vartype test_data_size: float
    """

    _attribute_map = {
        'test_data_size': {'key': 'testDataSize', 'type': 'float'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword test_data_size:
        :paramtype test_data_size: float
        """
        super(TestDataSettings, self).__init__(**kwargs)
        self.test_data_size = kwargs.get('test_data_size', None)


# Definition of a flow tool: its type, I/O schema, source/packaging details
# and lifecycle state.
class Tool(msrest.serialization.Model):
    """Tool.

    :ivar name:
    :vartype name: str
    :ivar type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
     "csharp".
    :vartype type: str or ~flow.models.ToolType
    :ivar inputs: This is a dictionary.
    :vartype inputs: dict[str, ~flow.models.InputDefinition]
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, ~flow.models.OutputDefinition]
    :ivar description:
    :vartype description: str
    :ivar connection_type:
    :vartype connection_type: list[str or ~flow.models.ConnectionType]
    :ivar module:
    :vartype module: str
    :ivar class_name:
    :vartype class_name: str
    :ivar source:
    :vartype source: str
    :ivar lkg_code:
    :vartype lkg_code: str
    :ivar code:
    :vartype code: str
    :ivar function:
    :vartype function: str
    :ivar action_type:
    :vartype action_type: str
    :ivar provider_config: This is a dictionary.
    :vartype provider_config: dict[str, ~flow.models.InputDefinition]
    :ivar function_config: This is a dictionary.
    :vartype function_config: dict[str, ~flow.models.InputDefinition]
    :ivar icon: Anything.
    :vartype icon: any
    :ivar category:
    :vartype category: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, any]
    :ivar is_builtin:
    :vartype is_builtin: bool
    :ivar package:
    :vartype package: str
    :ivar package_version:
    :vartype package_version: str
    :ivar default_prompt:
    :vartype default_prompt: str
    :ivar enable_kwargs:
    :vartype enable_kwargs: bool
    :ivar deprecated_tools:
    :vartype deprecated_tools: list[str]
    :ivar tool_state: Possible values include: "Stable", "Preview", "Deprecated".
    :vartype tool_state: str or ~flow.models.ToolState
    """

    # NOTE: several wire keys here are snake_case (e.g. 'connection_type',
    # 'class_name') rather than camelCase — this matches the service contract.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'inputs': {'key': 'inputs', 'type': '{InputDefinition}'},
        'outputs': {'key': 'outputs', 'type': '{OutputDefinition}'},
        'description': {'key': 'description', 'type': 'str'},
        'connection_type': {'key': 'connection_type', 'type': '[str]'},
        'module': {'key': 'module', 'type': 'str'},
        'class_name': {'key': 'class_name', 'type': 'str'},
        'source': {'key': 'source', 'type': 'str'},
        'lkg_code': {'key': 'lkgCode', 'type': 'str'},
        'code': {'key': 'code', 'type': 'str'},
        'function': {'key': 'function', 'type': 'str'},
        'action_type': {'key': 'action_type', 'type': 'str'},
        'provider_config': {'key': 'provider_config', 'type': '{InputDefinition}'},
        'function_config': {'key': 'function_config', 'type': '{InputDefinition}'},
        'icon': {'key': 'icon', 'type': 'object'},
        'category': {'key': 'category', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{object}'},
        'is_builtin': {'key': 'is_builtin', 'type': 'bool'},
        'package': {'key': 'package', 'type': 'str'},
        'package_version': {'key': 'package_version', 'type': 'str'},
        'default_prompt': {'key': 'default_prompt', 'type': 'str'},
        'enable_kwargs': {'key': 'enable_kwargs', 'type': 'bool'},
        'deprecated_tools': {'key': 'deprecated_tools', 'type': '[str]'},
        'tool_state': {'key': 'tool_state', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
         "csharp".
        :paramtype type: str or ~flow.models.ToolType
        :keyword inputs: This is a dictionary.
        :paramtype inputs: dict[str, ~flow.models.InputDefinition]
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, ~flow.models.OutputDefinition]
        :keyword description:
        :paramtype description: str
        :keyword connection_type:
        :paramtype connection_type: list[str or ~flow.models.ConnectionType]
        :keyword module:
        :paramtype module: str
        :keyword class_name:
        :paramtype class_name: str
        :keyword source:
        :paramtype source: str
        :keyword lkg_code:
        :paramtype lkg_code: str
        :keyword code:
        :paramtype code: str
        :keyword function:
        :paramtype function: str
        :keyword action_type:
        :paramtype action_type: str
        :keyword provider_config: This is a dictionary.
        :paramtype provider_config: dict[str, ~flow.models.InputDefinition]
        :keyword function_config: This is a dictionary.
        :paramtype function_config: dict[str, ~flow.models.InputDefinition]
        :keyword icon: Anything.
        :paramtype icon: any
        :keyword category:
        :paramtype category: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, any]
        :keyword is_builtin:
        :paramtype is_builtin: bool
        :keyword package:
        :paramtype package: str
        :keyword package_version:
        :paramtype package_version: str
        :keyword default_prompt:
        :paramtype default_prompt: str
        :keyword enable_kwargs:
        :paramtype enable_kwargs: bool
        :keyword deprecated_tools:
        :paramtype deprecated_tools: list[str]
        :keyword tool_state: Possible values include: "Stable", "Preview", "Deprecated".
        :paramtype tool_state: str or ~flow.models.ToolState
        """
        super(Tool, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.type = kwargs.get('type', None)
        self.inputs = kwargs.get('inputs', None)
        self.outputs = kwargs.get('outputs', None)
        self.description = kwargs.get('description', None)
        self.connection_type = kwargs.get('connection_type', None)
        self.module = kwargs.get('module', None)
        self.class_name = kwargs.get('class_name', None)
        self.source = kwargs.get('source', None)
        self.lkg_code = kwargs.get('lkg_code', None)
        self.code = kwargs.get('code', None)
        self.function = kwargs.get('function', None)
        self.action_type = kwargs.get('action_type', None)
        self.provider_config = kwargs.get('provider_config', None)
        self.function_config = kwargs.get('function_config', None)
        self.icon = kwargs.get('icon', None)
        self.category = kwargs.get('category', None)
        self.tags = kwargs.get('tags', None)
        self.is_builtin = kwargs.get('is_builtin', None)
        self.package = kwargs.get('package', None)
        self.package_version = kwargs.get('package_version', None)
        self.default_prompt = kwargs.get('default_prompt', None)
        self.enable_kwargs = kwargs.get('enable_kwargs', None)
        self.deprecated_tools = kwargs.get('deprecated_tools', None)
        self.tool_state = kwargs.get('tool_state', None)


# Result payload of a tool-function invocation: the value plus captured logs.
class ToolFuncResponse(msrest.serialization.Model):
    """ToolFuncResponse.

    :ivar result: Anything.
    :vartype result: any
    :ivar logs: This is a dictionary.
    :vartype logs: dict[str, str]
    """

    _attribute_map = {
        'result': {'key': 'result', 'type': 'object'},
        'logs': {'key': 'logs', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword result: Anything.
        :paramtype result: any
        :keyword logs: This is a dictionary.
        :paramtype logs: dict[str, str]
        """
        super(ToolFuncResponse, self).__init__(**kwargs)
        self.result = kwargs.get('result', None)
        self.logs = kwargs.get('logs', None)


# Reference to a function that produces dynamic list options for a tool input.
class ToolInputDynamicList(msrest.serialization.Model):
    """ToolInputDynamicList.

    :ivar func_path:
    :vartype func_path: str
    :ivar func_kwargs:
    :vartype func_kwargs: list[dict[str, any]]
    """

    _attribute_map = {
        'func_path': {'key': 'func_path', 'type': 'str'},
        'func_kwargs': {'key': 'func_kwargs', 'type': '[{object}]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword func_path:
        :paramtype func_path: str
        :keyword func_kwargs:
        :paramtype func_kwargs: list[dict[str, any]]
        """
        super(ToolInputDynamicList, self).__init__(**kwargs)
        self.func_path = kwargs.get('func_path', None)
        self.func_kwargs = kwargs.get('func_kwargs', None)


# Like ToolInputDynamicList, plus a reverse function path.
class ToolInputGeneratedBy(msrest.serialization.Model):
    """ToolInputGeneratedBy.

    :ivar func_path:
    :vartype func_path: str
    :ivar func_kwargs:
    :vartype func_kwargs: list[dict[str, any]]
    :ivar reverse_func_path:
    :vartype reverse_func_path: str
    """

    _attribute_map = {
        'func_path': {'key': 'func_path', 'type': 'str'},
        'func_kwargs': {'key': 'func_kwargs', 'type': '[{object}]'},
        'reverse_func_path': {'key': 'reverse_func_path', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword func_path:
        :paramtype func_path: str
        :keyword func_kwargs:
        :paramtype func_kwargs: list[dict[str, any]]
        :keyword reverse_func_path:
        :paramtype reverse_func_path: str
        """
        super(ToolInputGeneratedBy, self).__init__(**kwargs)
        self.func_path = kwargs.get('func_path', None)
        self.func_kwargs = kwargs.get('func_kwargs', None)
        self.reverse_func_path = kwargs.get('reverse_func_path', None)


# Tool metadata lookup result: successfully resolved tools plus per-tool errors.
class ToolMetaDto(msrest.serialization.Model):
    """ToolMetaDto.

    :ivar tools: This is a dictionary.
    :vartype tools: dict[str, ~flow.models.Tool]
    :ivar errors: This is a dictionary.
    :vartype errors: dict[str, ~flow.models.ErrorResponse]
    """

    _attribute_map = {
        'tools': {'key': 'tools', 'type': '{Tool}'},
        'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword tools: This is a dictionary.
        :paramtype tools: dict[str, ~flow.models.Tool]
        :keyword errors: This is a dictionary.
        :paramtype errors: dict[str, ~flow.models.ErrorResponse]
        """
        super(ToolMetaDto, self).__init__(**kwargs)
        self.tools = kwargs.get('tools', None)
        self.errors = kwargs.get('errors', None)


class ToolSetting(msrest.serialization.Model):
    """ToolSetting.

    :ivar providers:
    :vartype providers: list[~flow.models.ProviderEntity]
    """

    _attribute_map = {
        'providers': {'key': 'providers', 'type': '[ProviderEntity]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword providers:
        :paramtype providers: list[~flow.models.ProviderEntity]
        """
        super(ToolSetting, self).__init__(**kwargs)
        self.providers = kwargs.get('providers', None)


class ToolSourceMeta(msrest.serialization.Model):
    """ToolSourceMeta.

    :ivar tool_type:
    :vartype tool_type: str
    """

    _attribute_map = {
        'tool_type': {'key': 'tool_type', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword tool_type:
        :paramtype tool_type: str
        """
        super(ToolSourceMeta, self).__init__(**kwargs)
        self.tool_type = kwargs.get('tool_type', None)


class TorchDistributedConfiguration(msrest.serialization.Model):
    """TorchDistributedConfiguration.

    :ivar process_count_per_node:
    :vartype process_count_per_node: int
    """

    _attribute_map = {
        'process_count_per_node': {'key': 'processCountPerNode', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword process_count_per_node:
        :paramtype process_count_per_node: int
        """
        super(TorchDistributedConfiguration, self).__init__(**kwargs)
        self.process_count_per_node = kwargs.get('process_count_per_node', None)


class TrainingDiagnosticConfiguration(msrest.serialization.Model):
    """TrainingDiagnosticConfiguration.
:ivar job_heart_beat_timeout_seconds: :vartype job_heart_beat_timeout_seconds: int """ _attribute_map = { 'job_heart_beat_timeout_seconds': {'key': 'jobHeartBeatTimeoutSeconds', 'type': 'int'}, } def __init__( self, **kwargs ): """ :keyword job_heart_beat_timeout_seconds: :paramtype job_heart_beat_timeout_seconds: int """ super(TrainingDiagnosticConfiguration, self).__init__(**kwargs) self.job_heart_beat_timeout_seconds = kwargs.get('job_heart_beat_timeout_seconds', None) class TrainingOutput(msrest.serialization.Model): """TrainingOutput. :ivar training_output_type: Possible values include: "Metrics", "Model". :vartype training_output_type: str or ~flow.models.TrainingOutputType :ivar iteration: :vartype iteration: int :ivar metric: :vartype metric: str :ivar model_file: :vartype model_file: str """ _attribute_map = { 'training_output_type': {'key': 'trainingOutputType', 'type': 'str'}, 'iteration': {'key': 'iteration', 'type': 'int'}, 'metric': {'key': 'metric', 'type': 'str'}, 'model_file': {'key': 'modelFile', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword training_output_type: Possible values include: "Metrics", "Model". :paramtype training_output_type: str or ~flow.models.TrainingOutputType :keyword iteration: :paramtype iteration: int :keyword metric: :paramtype metric: str :keyword model_file: :paramtype model_file: str """ super(TrainingOutput, self).__init__(**kwargs) self.training_output_type = kwargs.get('training_output_type', None) self.iteration = kwargs.get('iteration', None) self.metric = kwargs.get('metric', None) self.model_file = kwargs.get('model_file', None) class TrainingSettings(msrest.serialization.Model): """TrainingSettings. 
:ivar block_list_models: :vartype block_list_models: list[str] :ivar allow_list_models: :vartype allow_list_models: list[str] :ivar enable_dnn_training: :vartype enable_dnn_training: bool :ivar enable_onnx_compatible_models: :vartype enable_onnx_compatible_models: bool :ivar stack_ensemble_settings: :vartype stack_ensemble_settings: ~flow.models.StackEnsembleSettings :ivar enable_stack_ensemble: :vartype enable_stack_ensemble: bool :ivar enable_vote_ensemble: :vartype enable_vote_ensemble: bool :ivar ensemble_model_download_timeout: :vartype ensemble_model_download_timeout: str :ivar enable_model_explainability: :vartype enable_model_explainability: bool :ivar training_mode: Possible values include: "Distributed", "NonDistributed", "Auto". :vartype training_mode: str or ~flow.models.TabularTrainingMode """ _attribute_map = { 'block_list_models': {'key': 'blockListModels', 'type': '[str]'}, 'allow_list_models': {'key': 'allowListModels', 'type': '[str]'}, 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'str'}, 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, 'training_mode': {'key': 'trainingMode', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword block_list_models: :paramtype block_list_models: list[str] :keyword allow_list_models: :paramtype allow_list_models: list[str] :keyword enable_dnn_training: :paramtype enable_dnn_training: bool :keyword enable_onnx_compatible_models: :paramtype enable_onnx_compatible_models: bool :keyword stack_ensemble_settings: :paramtype stack_ensemble_settings: 
~flow.models.StackEnsembleSettings :keyword enable_stack_ensemble: :paramtype enable_stack_ensemble: bool :keyword enable_vote_ensemble: :paramtype enable_vote_ensemble: bool :keyword ensemble_model_download_timeout: :paramtype ensemble_model_download_timeout: str :keyword enable_model_explainability: :paramtype enable_model_explainability: bool :keyword training_mode: Possible values include: "Distributed", "NonDistributed", "Auto". :paramtype training_mode: str or ~flow.models.TabularTrainingMode """ super(TrainingSettings, self).__init__(**kwargs) self.block_list_models = kwargs.get('block_list_models', None) self.allow_list_models = kwargs.get('allow_list_models', None) self.enable_dnn_training = kwargs.get('enable_dnn_training', None) self.enable_onnx_compatible_models = kwargs.get('enable_onnx_compatible_models', None) self.stack_ensemble_settings = kwargs.get('stack_ensemble_settings', None) self.enable_stack_ensemble = kwargs.get('enable_stack_ensemble', None) self.enable_vote_ensemble = kwargs.get('enable_vote_ensemble', None) self.ensemble_model_download_timeout = kwargs.get('ensemble_model_download_timeout', None) self.enable_model_explainability = kwargs.get('enable_model_explainability', None) self.training_mode = kwargs.get('training_mode', None) class TriggerAsyncOperationStatus(msrest.serialization.Model): """TriggerAsyncOperationStatus. :ivar id: :vartype id: str :ivar operation_type: Possible values include: "Create", "Update", "Delete", "CreateOrUpdate". :vartype operation_type: str or ~flow.models.TriggerOperationType :ivar provisioning_status: Possible values include: "Creating", "Updating", "Deleting", "Succeeded", "Failed", "Canceled". :vartype provisioning_status: str or ~flow.models.ScheduleProvisioningStatus :ivar created_time: :vartype created_time: ~datetime.datetime :ivar end_time: :vartype end_time: ~datetime.datetime :ivar error: The error response. 
:vartype error: ~flow.models.ErrorResponse :ivar status_code: Possible values include: "Continue", "SwitchingProtocols", "Processing", "EarlyHints", "OK", "Created", "Accepted", "NonAuthoritativeInformation", "NoContent", "ResetContent", "PartialContent", "MultiStatus", "AlreadyReported", "IMUsed", "MultipleChoices", "Ambiguous", "MovedPermanently", "Moved", "Found", "Redirect", "SeeOther", "RedirectMethod", "NotModified", "UseProxy", "Unused", "TemporaryRedirect", "RedirectKeepVerb", "PermanentRedirect", "BadRequest", "Unauthorized", "PaymentRequired", "Forbidden", "NotFound", "MethodNotAllowed", "NotAcceptable", "ProxyAuthenticationRequired", "RequestTimeout", "Conflict", "Gone", "LengthRequired", "PreconditionFailed", "RequestEntityTooLarge", "RequestUriTooLong", "UnsupportedMediaType", "RequestedRangeNotSatisfiable", "ExpectationFailed", "MisdirectedRequest", "UnprocessableEntity", "Locked", "FailedDependency", "UpgradeRequired", "PreconditionRequired", "TooManyRequests", "RequestHeaderFieldsTooLarge", "UnavailableForLegalReasons", "InternalServerError", "NotImplemented", "BadGateway", "ServiceUnavailable", "GatewayTimeout", "HttpVersionNotSupported", "VariantAlsoNegotiates", "InsufficientStorage", "LoopDetected", "NotExtended", "NetworkAuthenticationRequired". :vartype status_code: str or ~flow.models.HttpStatusCode """ _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'operation_type': {'key': 'operationType', 'type': 'str'}, 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, 'error': {'key': 'error', 'type': 'ErrorResponse'}, 'status_code': {'key': 'statusCode', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword id: :paramtype id: str :keyword operation_type: Possible values include: "Create", "Update", "Delete", "CreateOrUpdate". 
:paramtype operation_type: str or ~flow.models.TriggerOperationType :keyword provisioning_status: Possible values include: "Creating", "Updating", "Deleting", "Succeeded", "Failed", "Canceled". :paramtype provisioning_status: str or ~flow.models.ScheduleProvisioningStatus :keyword created_time: :paramtype created_time: ~datetime.datetime :keyword end_time: :paramtype end_time: ~datetime.datetime :keyword error: The error response. :paramtype error: ~flow.models.ErrorResponse :keyword status_code: Possible values include: "Continue", "SwitchingProtocols", "Processing", "EarlyHints", "OK", "Created", "Accepted", "NonAuthoritativeInformation", "NoContent", "ResetContent", "PartialContent", "MultiStatus", "AlreadyReported", "IMUsed", "MultipleChoices", "Ambiguous", "MovedPermanently", "Moved", "Found", "Redirect", "SeeOther", "RedirectMethod", "NotModified", "UseProxy", "Unused", "TemporaryRedirect", "RedirectKeepVerb", "PermanentRedirect", "BadRequest", "Unauthorized", "PaymentRequired", "Forbidden", "NotFound", "MethodNotAllowed", "NotAcceptable", "ProxyAuthenticationRequired", "RequestTimeout", "Conflict", "Gone", "LengthRequired", "PreconditionFailed", "RequestEntityTooLarge", "RequestUriTooLong", "UnsupportedMediaType", "RequestedRangeNotSatisfiable", "ExpectationFailed", "MisdirectedRequest", "UnprocessableEntity", "Locked", "FailedDependency", "UpgradeRequired", "PreconditionRequired", "TooManyRequests", "RequestHeaderFieldsTooLarge", "UnavailableForLegalReasons", "InternalServerError", "NotImplemented", "BadGateway", "ServiceUnavailable", "GatewayTimeout", "HttpVersionNotSupported", "VariantAlsoNegotiates", "InsufficientStorage", "LoopDetected", "NotExtended", "NetworkAuthenticationRequired". 
        :paramtype status_code: str or ~flow.models.HttpStatusCode
        """
        super(TriggerAsyncOperationStatus, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.id = kwargs.get('id', None)
        self.operation_type = kwargs.get('operation_type', None)
        self.provisioning_status = kwargs.get('provisioning_status', None)
        self.created_time = kwargs.get('created_time', None)
        self.end_time = kwargs.get('end_time', None)
        self.error = kwargs.get('error', None)
        self.status_code = kwargs.get('status_code', None)


class TuningNodeSetting(msrest.serialization.Model):
    """TuningNodeSetting.

    :ivar variant_ids:
    :vartype variant_ids: list[str]
    """

    # _attribute_map drives msrest (de)serialization: attribute -> wire key/type.
    _attribute_map = {
        'variant_ids': {'key': 'variantIds', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword variant_ids:
        :paramtype variant_ids: list[str]
        """
        super(TuningNodeSetting, self).__init__(**kwargs)
        self.variant_ids = kwargs.get('variant_ids', None)


class TypedAssetReference(msrest.serialization.Model):
    """TypedAssetReference.

    :ivar asset_id:
    :vartype asset_id: str
    :ivar type:
    :vartype type: str
    """

    _attribute_map = {
        'asset_id': {'key': 'assetId', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword asset_id:
        :paramtype asset_id: str
        :keyword type:
        :paramtype type: str
        """
        super(TypedAssetReference, self).__init__(**kwargs)
        self.asset_id = kwargs.get('asset_id', None)
        self.type = kwargs.get('type', None)


class UIAzureOpenAIDeploymentNameSelector(msrest.serialization.Model):
    """UIAzureOpenAIDeploymentNameSelector.

    :ivar capabilities:
    :vartype capabilities: ~flow.models.UIAzureOpenAIModelCapabilities
    """

    # NOTE: wire keys for this model are PascalCase ('Capabilities'),
    # unlike the camelCase used by most sibling models.
    _attribute_map = {
        'capabilities': {'key': 'Capabilities', 'type': 'UIAzureOpenAIModelCapabilities'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword capabilities:
        :paramtype capabilities: ~flow.models.UIAzureOpenAIModelCapabilities
        """
        super(UIAzureOpenAIDeploymentNameSelector, self).__init__(**kwargs)
        self.capabilities = kwargs.get('capabilities', None)


class UIAzureOpenAIModelCapabilities(msrest.serialization.Model):
    """UIAzureOpenAIModelCapabilities.

    :ivar completion:
    :vartype completion: bool
    :ivar chat_completion:
    :vartype chat_completion: bool
    :ivar embeddings:
    :vartype embeddings: bool
    """

    _attribute_map = {
        'completion': {'key': 'Completion', 'type': 'bool'},
        'chat_completion': {'key': 'ChatCompletion', 'type': 'bool'},
        'embeddings': {'key': 'Embeddings', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword completion:
        :paramtype completion: bool
        :keyword chat_completion:
        :paramtype chat_completion: bool
        :keyword embeddings:
        :paramtype embeddings: bool
        """
        super(UIAzureOpenAIModelCapabilities, self).__init__(**kwargs)
        self.completion = kwargs.get('completion', None)
        self.chat_completion = kwargs.get('chat_completion', None)
        self.embeddings = kwargs.get('embeddings', None)


class UIColumnPicker(msrest.serialization.Model):
    """UIColumnPicker.

    :ivar column_picker_for:
    :vartype column_picker_for: str
    :ivar column_selection_categories:
    :vartype column_selection_categories: list[str]
    :ivar single_column_selection:
    :vartype single_column_selection: bool
    """

    _attribute_map = {
        'column_picker_for': {'key': 'columnPickerFor', 'type': 'str'},
        'column_selection_categories': {'key': 'columnSelectionCategories', 'type': '[str]'},
        'single_column_selection': {'key': 'singleColumnSelection', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword column_picker_for:
        :paramtype column_picker_for: str
        :keyword column_selection_categories:
        :paramtype column_selection_categories: list[str]
        :keyword single_column_selection:
        :paramtype single_column_selection: bool
        """
        super(UIColumnPicker, self).__init__(**kwargs)
        # Optional fields default to None when not supplied.
        self.column_picker_for = kwargs.get('column_picker_for', None)
        self.column_selection_categories = kwargs.get('column_selection_categories', None)
        self.single_column_selection = kwargs.get('single_column_selection', None)


class UIComputeSelection(msrest.serialization.Model):
    """UIComputeSelection.

    :ivar compute_types:
    :vartype compute_types: list[str]
    :ivar require_gpu:
    :vartype require_gpu: bool
    :ivar os_types:
    :vartype os_types: list[str]
    :ivar support_serverless:
    :vartype support_serverless: bool
    :ivar compute_run_settings_mapping: Dictionary of
     <componentsΒ·10my8ojΒ·schemasΒ·uicomputeselectionΒ·propertiesΒ·computerunsettingsmappingΒ·additionalproperties>.
    :vartype compute_run_settings_mapping: dict[str, list[~flow.models.RunSettingParameter]]
    """

    # '{[RunSettingParameter]}' = dict of lists of RunSettingParameter in msrest type syntax.
    _attribute_map = {
        'compute_types': {'key': 'computeTypes', 'type': '[str]'},
        'require_gpu': {'key': 'requireGpu', 'type': 'bool'},
        'os_types': {'key': 'osTypes', 'type': '[str]'},
        'support_serverless': {'key': 'supportServerless', 'type': 'bool'},
        'compute_run_settings_mapping': {'key': 'computeRunSettingsMapping', 'type': '{[RunSettingParameter]}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword compute_types:
        :paramtype compute_types: list[str]
        :keyword require_gpu:
        :paramtype require_gpu: bool
        :keyword os_types:
        :paramtype os_types: list[str]
        :keyword support_serverless:
        :paramtype support_serverless: bool
        :keyword compute_run_settings_mapping: Dictionary of
         <componentsΒ·10my8ojΒ·schemasΒ·uicomputeselectionΒ·propertiesΒ·computerunsettingsmappingΒ·additionalproperties>.
        :paramtype compute_run_settings_mapping: dict[str, list[~flow.models.RunSettingParameter]]
        """
        super(UIComputeSelection, self).__init__(**kwargs)
        self.compute_types = kwargs.get('compute_types', None)
        self.require_gpu = kwargs.get('require_gpu', None)
        self.os_types = kwargs.get('os_types', None)
        self.support_serverless = kwargs.get('support_serverless', None)
        self.compute_run_settings_mapping = kwargs.get('compute_run_settings_mapping', None)


class UIHyperparameterConfiguration(msrest.serialization.Model):
    """UIHyperparameterConfiguration.

    :ivar model_name_to_hyper_parameter_and_distribution_mapping: Dictionary of
     <componentsΒ·1nrp69tΒ·schemasΒ·uihyperparameterconfigurationΒ·propertiesΒ·modelnametohyperparameteranddistributionmappingΒ·additionalproperties>.
    :vartype model_name_to_hyper_parameter_and_distribution_mapping: dict[str, dict[str,
     list[str]]]
    :ivar distribution_parameters_mapping: Dictionary of
     <componentsΒ·d9plq4Β·schemasΒ·uihyperparameterconfigurationΒ·propertiesΒ·distributionparametersmappingΒ·additionalproperties>.
    :vartype distribution_parameters_mapping: dict[str,
     list[~flow.models.DistributionParameter]]
    :ivar json_schema:
    :vartype json_schema: str
    """

    # '{{[str]}}' = dict of dict of list-of-str; '{[DistributionParameter]}' = dict of lists.
    _attribute_map = {
        'model_name_to_hyper_parameter_and_distribution_mapping': {'key': 'modelNameToHyperParameterAndDistributionMapping', 'type': '{{[str]}}'},
        'distribution_parameters_mapping': {'key': 'distributionParametersMapping', 'type': '{[DistributionParameter]}'},
        'json_schema': {'key': 'jsonSchema', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword model_name_to_hyper_parameter_and_distribution_mapping: Dictionary of
         <componentsΒ·1nrp69tΒ·schemasΒ·uihyperparameterconfigurationΒ·propertiesΒ·modelnametohyperparameteranddistributionmappingΒ·additionalproperties>.
        :paramtype model_name_to_hyper_parameter_and_distribution_mapping: dict[str, dict[str,
         list[str]]]
        :keyword distribution_parameters_mapping: Dictionary of
         <componentsΒ·d9plq4Β·schemasΒ·uihyperparameterconfigurationΒ·propertiesΒ·distributionparametersmappingΒ·additionalproperties>.
        :paramtype distribution_parameters_mapping: dict[str,
         list[~flow.models.DistributionParameter]]
        :keyword json_schema:
        :paramtype json_schema: str
        """
        super(UIHyperparameterConfiguration, self).__init__(**kwargs)
        self.model_name_to_hyper_parameter_and_distribution_mapping = kwargs.get('model_name_to_hyper_parameter_and_distribution_mapping', None)
        self.distribution_parameters_mapping = kwargs.get('distribution_parameters_mapping', None)
        self.json_schema = kwargs.get('json_schema', None)


class UIInputSetting(msrest.serialization.Model):
    """UIInputSetting.

    :ivar name:
    :vartype name: str
    :ivar data_delivery_mode: Possible values include: "Read-only mount", "Read-write mount",
     "Download", "Direct", "Evaluate mount", "Evaluate download", "Hdfs".
    :vartype data_delivery_mode: str or ~flow.models.UIInputDataDeliveryMode
    :ivar path_on_compute:
    :vartype path_on_compute: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'data_delivery_mode': {'key': 'dataDeliveryMode', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword data_delivery_mode: Possible values include: "Read-only mount", "Read-write
         mount", "Download", "Direct", "Evaluate mount", "Evaluate download", "Hdfs".
        :paramtype data_delivery_mode: str or ~flow.models.UIInputDataDeliveryMode
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        """
        super(UIInputSetting, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.data_delivery_mode = kwargs.get('data_delivery_mode', None)
        self.path_on_compute = kwargs.get('path_on_compute', None)


class UIJsonEditor(msrest.serialization.Model):
    """UIJsonEditor.

    :ivar json_schema:
    :vartype json_schema: str
    """

    _attribute_map = {
        'json_schema': {'key': 'jsonSchema', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword json_schema:
        :paramtype json_schema: str
        """
        super(UIJsonEditor, self).__init__(**kwargs)
        self.json_schema = kwargs.get('json_schema', None)


class UIParameterHint(msrest.serialization.Model):
    """UIParameterHint.

    :ivar ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential",
     "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle",
     "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection",
     "ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection".
    :vartype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
    :ivar column_picker:
    :vartype column_picker: ~flow.models.UIColumnPicker
    :ivar ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql".
    :vartype ui_script_language: str or ~flow.models.UIScriptLanguageEnum
    :ivar json_editor:
    :vartype json_editor: ~flow.models.UIJsonEditor
    :ivar prompt_flow_connection_selector:
    :vartype prompt_flow_connection_selector: ~flow.models.UIPromptFlowConnectionSelector
    :ivar azure_open_ai_deployment_name_selector:
    :vartype azure_open_ai_deployment_name_selector:
     ~flow.models.UIAzureOpenAIDeploymentNameSelector
    :ivar ux_ignore:
    :vartype ux_ignore: bool
    :ivar anonymous:
    :vartype anonymous: bool
    """

    # NOTE: this model mixes camelCase and PascalCase wire keys
    # ('uiWidgetType' vs 'PromptFlowConnectionSelector', 'UxIgnore', 'Anonymous');
    # the keys mirror the service contract exactly.
    _attribute_map = {
        'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'},
        'column_picker': {'key': 'columnPicker', 'type': 'UIColumnPicker'},
        'ui_script_language': {'key': 'uiScriptLanguage', 'type': 'str'},
        'json_editor': {'key': 'jsonEditor', 'type': 'UIJsonEditor'},
        'prompt_flow_connection_selector': {'key': 'PromptFlowConnectionSelector', 'type': 'UIPromptFlowConnectionSelector'},
        'azure_open_ai_deployment_name_selector': {'key': 'AzureOpenAIDeploymentNameSelector', 'type': 'UIAzureOpenAIDeploymentNameSelector'},
        'ux_ignore': {'key': 'UxIgnore', 'type': 'bool'},
        'anonymous': {'key': 'Anonymous', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker",
         "Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter",
         "SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection",
         "InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection",
         "AzureOpenAIDeploymentNameSelection".
        :paramtype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
        :keyword column_picker:
        :paramtype column_picker: ~flow.models.UIColumnPicker
        :keyword ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql".
        :paramtype ui_script_language: str or ~flow.models.UIScriptLanguageEnum
        :keyword json_editor:
        :paramtype json_editor: ~flow.models.UIJsonEditor
        :keyword prompt_flow_connection_selector:
        :paramtype prompt_flow_connection_selector: ~flow.models.UIPromptFlowConnectionSelector
        :keyword azure_open_ai_deployment_name_selector:
        :paramtype azure_open_ai_deployment_name_selector:
         ~flow.models.UIAzureOpenAIDeploymentNameSelector
        :keyword ux_ignore:
        :paramtype ux_ignore: bool
        :keyword anonymous:
        :paramtype anonymous: bool
        """
        super(UIParameterHint, self).__init__(**kwargs)
        self.ui_widget_type = kwargs.get('ui_widget_type', None)
        self.column_picker = kwargs.get('column_picker', None)
        self.ui_script_language = kwargs.get('ui_script_language', None)
        self.json_editor = kwargs.get('json_editor', None)
        self.prompt_flow_connection_selector = kwargs.get('prompt_flow_connection_selector', None)
        self.azure_open_ai_deployment_name_selector = kwargs.get('azure_open_ai_deployment_name_selector', None)
        self.ux_ignore = kwargs.get('ux_ignore', None)
        self.anonymous = kwargs.get('anonymous', None)


class UIPromptFlowConnectionSelector(msrest.serialization.Model):
    """UIPromptFlowConnectionSelector.

    :ivar prompt_flow_connection_type:
    :vartype prompt_flow_connection_type: str
    """

    # NOTE: PascalCase wire key, matching the service contract.
    _attribute_map = {
        'prompt_flow_connection_type': {'key': 'PromptFlowConnectionType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword prompt_flow_connection_type:
        :paramtype prompt_flow_connection_type: str
        """
        super(UIPromptFlowConnectionSelector, self).__init__(**kwargs)
        self.prompt_flow_connection_type = kwargs.get('prompt_flow_connection_type', None)


class UIWidgetMetaInfo(msrest.serialization.Model):
    """UIWidgetMetaInfo.

    :ivar module_node_id:
    :vartype module_node_id: str
    :ivar meta_module_id:
    :vartype meta_module_id: str
    :ivar parameter_name:
    :vartype parameter_name: str
    :ivar ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential",
     "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle",
     "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection",
     "ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection".
    :vartype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
    """

    _attribute_map = {
        'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
        'meta_module_id': {'key': 'metaModuleId', 'type': 'str'},
        'parameter_name': {'key': 'parameterName', 'type': 'str'},
        'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword module_node_id:
        :paramtype module_node_id: str
        :keyword meta_module_id:
        :paramtype meta_module_id: str
        :keyword parameter_name:
        :paramtype parameter_name: str
        :keyword ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker",
         "Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter",
         "SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection",
         "InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection",
         "AzureOpenAIDeploymentNameSelection".
        :paramtype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
        """
        super(UIWidgetMetaInfo, self).__init__(**kwargs)
        # Optional fields default to None when not supplied.
        self.module_node_id = kwargs.get('module_node_id', None)
        self.meta_module_id = kwargs.get('meta_module_id', None)
        self.parameter_name = kwargs.get('parameter_name', None)
        self.ui_widget_type = kwargs.get('ui_widget_type', None)


class UIYamlEditor(msrest.serialization.Model):
    """UIYamlEditor.

    :ivar json_schema:
    :vartype json_schema: str
    """

    _attribute_map = {
        'json_schema': {'key': 'jsonSchema', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword json_schema:
        :paramtype json_schema: str
        """
        super(UIYamlEditor, self).__init__(**kwargs)
        self.json_schema = kwargs.get('json_schema', None)


class UnversionedEntityRequestDto(msrest.serialization.Model):
    """UnversionedEntityRequestDto.

    :ivar unversioned_entity_ids:
    :vartype unversioned_entity_ids: list[str]
    """

    _attribute_map = {
        'unversioned_entity_ids': {'key': 'unversionedEntityIds', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword unversioned_entity_ids:
        :paramtype unversioned_entity_ids: list[str]
        """
        super(UnversionedEntityRequestDto, self).__init__(**kwargs)
        self.unversioned_entity_ids = kwargs.get('unversioned_entity_ids', None)


class UnversionedEntityResponseDto(msrest.serialization.Model):
    """UnversionedEntityResponseDto.

    :ivar unversioned_entities:
    :vartype unversioned_entities: list[~flow.models.FlowIndexEntity]
    :ivar unversioned_entity_json_schema: Anything.
    :vartype unversioned_entity_json_schema: any
    :ivar normalized_request_charge:
    :vartype normalized_request_charge: float
    :ivar normalized_request_charge_period:
    :vartype normalized_request_charge_period: str
    """

    # 'object' = untyped pass-through value in msrest type syntax.
    _attribute_map = {
        'unversioned_entities': {'key': 'unversionedEntities', 'type': '[FlowIndexEntity]'},
        'unversioned_entity_json_schema': {'key': 'unversionedEntityJsonSchema', 'type': 'object'},
        'normalized_request_charge': {'key': 'normalizedRequestCharge', 'type': 'float'},
        'normalized_request_charge_period': {'key': 'normalizedRequestChargePeriod', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword unversioned_entities:
        :paramtype unversioned_entities: list[~flow.models.FlowIndexEntity]
        :keyword unversioned_entity_json_schema: Anything.
        :paramtype unversioned_entity_json_schema: any
        :keyword normalized_request_charge:
        :paramtype normalized_request_charge: float
        :keyword normalized_request_charge_period:
        :paramtype normalized_request_charge_period: str
        """
        super(UnversionedEntityResponseDto, self).__init__(**kwargs)
        self.unversioned_entities = kwargs.get('unversioned_entities', None)
        self.unversioned_entity_json_schema = kwargs.get('unversioned_entity_json_schema', None)
        self.normalized_request_charge = kwargs.get('normalized_request_charge', None)
        self.normalized_request_charge_period = kwargs.get('normalized_request_charge_period', None)


class UnversionedRebuildIndexDto(msrest.serialization.Model):
    """UnversionedRebuildIndexDto.

    :ivar continuation_token:
    :vartype continuation_token: str
    :ivar entity_count:
    :vartype entity_count: int
    :ivar entity_container_type:
    :vartype entity_container_type: str
    :ivar entity_type:
    :vartype entity_type: str
    :ivar resource_id:
    :vartype resource_id: str
    :ivar workspace_id:
    :vartype workspace_id: str
    :ivar immutable_resource_id:
    :vartype immutable_resource_id: str
    :ivar start_time:
    :vartype start_time: ~datetime.datetime
    :ivar end_time:
    :vartype end_time: ~datetime.datetime
    """

    # 'iso-8601': datetime fields travel as ISO-8601 strings on the wire.
    _attribute_map = {
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'entity_count': {'key': 'entityCount', 'type': 'int'},
        'entity_container_type': {'key': 'entityContainerType', 'type': 'str'},
        'entity_type': {'key': 'entityType', 'type': 'str'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'workspace_id': {'key': 'workspaceId', 'type': 'str'},
        'immutable_resource_id': {'key': 'immutableResourceId', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword continuation_token:
        :paramtype continuation_token: str
        :keyword entity_count:
        :paramtype entity_count: int
        :keyword entity_container_type:
        :paramtype entity_container_type: str
        :keyword entity_type:
        :paramtype entity_type: str
        :keyword resource_id:
        :paramtype resource_id: str
        :keyword workspace_id:
        :paramtype workspace_id: str
        :keyword immutable_resource_id:
        :paramtype immutable_resource_id: str
        :keyword start_time:
        :paramtype start_time: ~datetime.datetime
        :keyword end_time:
        :paramtype end_time: ~datetime.datetime
        """
        super(UnversionedRebuildIndexDto, self).__init__(**kwargs)
        self.continuation_token = kwargs.get('continuation_token', None)
        self.entity_count = kwargs.get('entity_count', None)
        self.entity_container_type = kwargs.get('entity_container_type', None)
        self.entity_type = kwargs.get('entity_type', None)
        self.resource_id = kwargs.get('resource_id', None)
        self.workspace_id = kwargs.get('workspace_id', None)
        self.immutable_resource_id = kwargs.get('immutable_resource_id', None)
        self.start_time = kwargs.get('start_time', None)
        self.end_time = kwargs.get('end_time', None)


class UnversionedRebuildResponseDto(msrest.serialization.Model):
    """UnversionedRebuildResponseDto.

    :ivar entities:
    :vartype entities: ~flow.models.SegmentedResult1
    :ivar unversioned_entity_schema: Anything.
    :vartype unversioned_entity_schema: any
    :ivar normalized_request_charge:
    :vartype normalized_request_charge: float
    :ivar normalized_request_charge_period:
    :vartype normalized_request_charge_period: str
    """

    # 'object' = untyped pass-through value in msrest type syntax.
    _attribute_map = {
        'entities': {'key': 'entities', 'type': 'SegmentedResult1'},
        'unversioned_entity_schema': {'key': 'unversionedEntitySchema', 'type': 'object'},
        'normalized_request_charge': {'key': 'normalizedRequestCharge', 'type': 'float'},
        'normalized_request_charge_period': {'key': 'normalizedRequestChargePeriod', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword entities:
        :paramtype entities: ~flow.models.SegmentedResult1
        :keyword unversioned_entity_schema: Anything.
        :paramtype unversioned_entity_schema: any
        :keyword normalized_request_charge:
        :paramtype normalized_request_charge: float
        :keyword normalized_request_charge_period:
        :paramtype normalized_request_charge_period: str
        """
        super(UnversionedRebuildResponseDto, self).__init__(**kwargs)
        self.entities = kwargs.get('entities', None)
        self.unversioned_entity_schema = kwargs.get('unversioned_entity_schema', None)
        self.normalized_request_charge = kwargs.get('normalized_request_charge', None)
        self.normalized_request_charge_period = kwargs.get('normalized_request_charge_period', None)


class UpdateComponentRequest(msrest.serialization.Model):
    """UpdateComponentRequest.

    :ivar display_name:
    :vartype display_name: str
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar module_update_operation_type: Possible values include: "SetDefaultVersion",
     "EnableModule", "DisableModule", "UpdateDisplayName", "UpdateDescription", "UpdateTags".
    :vartype module_update_operation_type: str or ~flow.models.ModuleUpdateOperationType
    :ivar module_version:
    :vartype module_version: str
    """

    _attribute_map = {
        'display_name': {'key': 'displayName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'module_update_operation_type': {'key': 'moduleUpdateOperationType', 'type': 'str'},
        'module_version': {'key': 'moduleVersion', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword display_name:
        :paramtype display_name: str
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword module_update_operation_type: Possible values include: "SetDefaultVersion",
         "EnableModule", "DisableModule", "UpdateDisplayName", "UpdateDescription", "UpdateTags".
        :paramtype module_update_operation_type: str or ~flow.models.ModuleUpdateOperationType
        :keyword module_version:
        :paramtype module_version: str
        """
        super(UpdateComponentRequest, self).__init__(**kwargs)
        self.display_name = kwargs.get('display_name', None)
        self.description = kwargs.get('description', None)
        self.tags = kwargs.get('tags', None)
        self.module_update_operation_type = kwargs.get('module_update_operation_type', None)
        self.module_version = kwargs.get('module_version', None)


class UpdateFlowRequest(msrest.serialization.Model):
    """UpdateFlowRequest.

    :ivar flow_run_result:
    :vartype flow_run_result: ~flow.models.FlowRunResult
    :ivar flow_test_mode: Possible values include: "Sync", "Async".
    :vartype flow_test_mode: str or ~flow.models.FlowTestMode
    :ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
    :vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
    :ivar flow_name:
    :vartype flow_name: str
    :ivar description:
    :vartype description: str
    :ivar details:
    :vartype details: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar flow:
    :vartype flow: ~flow.models.Flow
    :ivar flow_definition_file_path:
    :vartype flow_definition_file_path: str
    :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
    :vartype flow_type: str or ~flow.models.FlowType
    :ivar flow_run_settings:
    :vartype flow_run_settings: ~flow.models.FlowRunSettings
    :ivar is_archived:
    :vartype is_archived: bool
    :ivar vm_size:
    :vartype vm_size: str
    :ivar max_idle_time_seconds:
    :vartype max_idle_time_seconds: long
    :ivar identity:
    :vartype identity: str
    """

    # 'long' = 64-bit integer in msrest type syntax (maps to Python int).
    _attribute_map = {
        'flow_run_result': {'key': 'flowRunResult', 'type': 'FlowRunResult'},
        'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'},
        'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
        'flow_name': {'key': 'flowName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'details': {'key': 'details', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'flow': {'key': 'flow', 'type': 'Flow'},
        'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
        'flow_type': {'key': 'flowType', 'type': 'str'},
        'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'},
        'is_archived': {'key': 'isArchived', 'type': 'bool'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
        'identity': {'key': 'identity', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword flow_run_result:
        :paramtype flow_run_result: ~flow.models.FlowRunResult
        :keyword flow_test_mode: Possible values include: "Sync", "Async".
        :paramtype flow_test_mode: str or ~flow.models.FlowTestMode
        :keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
        :paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
        :keyword flow_name:
        :paramtype flow_name: str
        :keyword description:
        :paramtype description: str
        :keyword details:
        :paramtype details: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword flow:
        :paramtype flow: ~flow.models.Flow
        :keyword flow_definition_file_path:
        :paramtype flow_definition_file_path: str
        :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
        :paramtype flow_type: str or ~flow.models.FlowType
        :keyword flow_run_settings:
        :paramtype flow_run_settings: ~flow.models.FlowRunSettings
        :keyword is_archived:
        :paramtype is_archived: bool
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword max_idle_time_seconds:
        :paramtype max_idle_time_seconds: long
        :keyword identity:
        :paramtype identity: str
        """
        super(UpdateFlowRequest, self).__init__(**kwargs)
        # All fields are optional; absent kwargs default to None.
        self.flow_run_result = kwargs.get('flow_run_result', None)
        self.flow_test_mode = kwargs.get('flow_test_mode', None)
        self.flow_test_infos = kwargs.get('flow_test_infos', None)
        self.flow_name = kwargs.get('flow_name', None)
        self.description = kwargs.get('description', None)
        self.details = kwargs.get('details', None)
        self.tags = kwargs.get('tags', None)
        self.flow = kwargs.get('flow', None)
        self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
        self.flow_type = kwargs.get('flow_type', None)
        self.flow_run_settings = kwargs.get('flow_run_settings', None)
        self.is_archived = kwargs.get('is_archived', None)
        self.vm_size = kwargs.get('vm_size', None)
        self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
        self.identity = kwargs.get('identity', None)


class UpdateFlowRuntimeRequest(msrest.serialization.Model):
    """UpdateFlowRuntimeRequest.

    :ivar runtime_description:
    :vartype runtime_description: str
    :ivar environment:
    :vartype environment: str
    :ivar instance_count:
    :vartype instance_count: int
    """

    _attribute_map = {
        'runtime_description': {'key': 'runtimeDescription', 'type': 'str'},
        'environment': {'key': 'environment', 'type': 'str'},
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword runtime_description:
        :paramtype runtime_description: str
        :keyword environment:
        :paramtype environment: str
        :keyword instance_count:
        :paramtype instance_count: int
        """
        super(UpdateFlowRuntimeRequest, self).__init__(**kwargs)
        self.runtime_description = kwargs.get('runtime_description', None)
        self.environment = kwargs.get('environment', None)
        self.instance_count = kwargs.get('instance_count', None)


class UpdateRegistryComponentRequest(msrest.serialization.Model):
    """UpdateRegistryComponentRequest.

    :ivar registry_name:
    :vartype registry_name: str
    :ivar component_name:
    :vartype component_name: str
    :ivar component_version:
    :vartype component_version: str
    :ivar update_type: The only acceptable values to pass in are None and "SetDefaultVersion". The
     default value is None.
    :vartype update_type: str
    """

    _attribute_map = {
        'registry_name': {'key': 'registryName', 'type': 'str'},
        'component_name': {'key': 'componentName', 'type': 'str'},
        'component_version': {'key': 'componentVersion', 'type': 'str'},
        'update_type': {'key': 'updateType', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword registry_name:
        :paramtype registry_name: str
        :keyword component_name:
        :paramtype component_name: str
        :keyword component_version:
        :paramtype component_version: str
        :keyword update_type: The only acceptable values to pass in are None and
         "SetDefaultVersion". The default value is None.
        :paramtype update_type: str
        """
        super(UpdateRegistryComponentRequest, self).__init__(**kwargs)
        self.registry_name = kwargs.get('registry_name', None)
        self.component_name = kwargs.get('component_name', None)
        self.component_version = kwargs.get('component_version', None)
        self.update_type = kwargs.get('update_type', None)


class UploadOptions(msrest.serialization.Model):
    """UploadOptions.

    :ivar overwrite:
    :vartype overwrite: bool
    :ivar source_globs:
    :vartype source_globs: ~flow.models.ExecutionGlobsOptions
    """

    _attribute_map = {
        'overwrite': {'key': 'overwrite', 'type': 'bool'},
        'source_globs': {'key': 'sourceGlobs', 'type': 'ExecutionGlobsOptions'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword overwrite:
        :paramtype overwrite: bool
        :keyword source_globs:
        :paramtype source_globs: ~flow.models.ExecutionGlobsOptions
        """
        super(UploadOptions, self).__init__(**kwargs)
        self.overwrite = kwargs.get('overwrite', None)
        self.source_globs = kwargs.get('source_globs', None)


class UriReference(msrest.serialization.Model):
    """UriReference.

    :ivar path:
    :vartype path: str
    :ivar is_file:
    :vartype is_file: bool
    """

    _attribute_map = {
        'path': {'key': 'path', 'type': 'str'},
        'is_file': {'key': 'isFile', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword path:
        :paramtype path: str
        :keyword is_file:
        :paramtype is_file: bool
        """
        super(UriReference, self).__init__(**kwargs)
        self.path = kwargs.get('path', None)
        self.is_file = kwargs.get('is_file', None)


class User(msrest.serialization.Model):
    """User.

    :ivar user_object_id: A user or service principal's object ID.
     This is EUPI and may only be logged to warm path telemetry.
    :vartype user_object_id: str
    :ivar user_pu_id: A user or service principal's PuID.
     This is PII and should never be logged.
    :vartype user_pu_id: str
    :ivar user_idp: A user identity provider. Eg live.com
     This is PII and should never be logged.
    :vartype user_idp: str
    :ivar user_alt_sec_id: A user alternate sec id.
     This represents the user in a different identity provider system
     Eg.1:live.com:puid
     This is PII and should never be logged.
    :vartype user_alt_sec_id: str
    :ivar user_iss: The issuer which issued the token for this user.
     This is PII and should never be logged.
    :vartype user_iss: str
    :ivar user_tenant_id: A user or service principal's tenant ID.
    :vartype user_tenant_id: str
    :ivar user_name: A user's full name or a service principal's app ID.
     This is PII and should never be logged.
    :vartype user_name: str
    :ivar upn: A user's Principal name (upn)
     This is PII and should never be logged.
    :vartype upn: str
    """

    # NOTE: per the docstring, most fields here are PII/EUPI — avoid logging
    # instances of this model wholesale.
    _attribute_map = {
        'user_object_id': {'key': 'userObjectId', 'type': 'str'},
        'user_pu_id': {'key': 'userPuId', 'type': 'str'},
        'user_idp': {'key': 'userIdp', 'type': 'str'},
        'user_alt_sec_id': {'key': 'userAltSecId', 'type': 'str'},
        'user_iss': {'key': 'userIss', 'type': 'str'},
        'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
        'user_name': {'key': 'userName', 'type': 'str'},
        'upn': {'key': 'upn', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """
        :keyword user_object_id: A user or service principal's object ID.
         This is EUPI and may only be logged to warm path telemetry.
        :paramtype user_object_id: str
        :keyword user_pu_id: A user or service principal's PuID.
         This is PII and should never be logged.
        :paramtype user_pu_id: str
        :keyword user_idp: A user identity provider. Eg live.com
         This is PII and should never be logged.
        :paramtype user_idp: str
        :keyword user_alt_sec_id: A user alternate sec id.
         This represents the user in a different identity provider system
         Eg.1:live.com:puid
         This is PII and should never be logged.
        :paramtype user_alt_sec_id: str
        :keyword user_iss: The issuer which issued the token for this user.
         This is PII and should never be logged.
        :paramtype user_iss: str
        :keyword user_tenant_id: A user or service principal's tenant ID.
        :paramtype user_tenant_id: str
        :keyword user_name: A user's full name or a service principal's app ID.
This is PII and should never be logged. :paramtype user_name: str :keyword upn: A user's Principal name (upn) This is PII andshould never be logged. :paramtype upn: str """ super(User, self).__init__(**kwargs) self.user_object_id = kwargs.get('user_object_id', None) self.user_pu_id = kwargs.get('user_pu_id', None) self.user_idp = kwargs.get('user_idp', None) self.user_alt_sec_id = kwargs.get('user_alt_sec_id', None) self.user_iss = kwargs.get('user_iss', None) self.user_tenant_id = kwargs.get('user_tenant_id', None) self.user_name = kwargs.get('user_name', None) self.upn = kwargs.get('upn', None) class UserAssignedIdentity(msrest.serialization.Model): """UserAssignedIdentity. :ivar principal_id: :vartype principal_id: str :ivar client_id: :vartype client_id: str """ _attribute_map = { 'principal_id': {'key': 'principalId', 'type': 'str'}, 'client_id': {'key': 'clientId', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword principal_id: :paramtype principal_id: str :keyword client_id: :paramtype client_id: str """ super(UserAssignedIdentity, self).__init__(**kwargs) self.principal_id = kwargs.get('principal_id', None) self.client_id = kwargs.get('client_id', None) class ValidationDataSettings(msrest.serialization.Model): """ValidationDataSettings. 
:ivar n_cross_validations: :vartype n_cross_validations: ~flow.models.NCrossValidations :ivar validation_data_size: :vartype validation_data_size: float :ivar cv_split_column_names: :vartype cv_split_column_names: list[str] :ivar validation_type: :vartype validation_type: str """ _attribute_map = { 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, 'validation_type': {'key': 'validationType', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword n_cross_validations: :paramtype n_cross_validations: ~flow.models.NCrossValidations :keyword validation_data_size: :paramtype validation_data_size: float :keyword cv_split_column_names: :paramtype cv_split_column_names: list[str] :keyword validation_type: :paramtype validation_type: str """ super(ValidationDataSettings, self).__init__(**kwargs) self.n_cross_validations = kwargs.get('n_cross_validations', None) self.validation_data_size = kwargs.get('validation_data_size', None) self.cv_split_column_names = kwargs.get('cv_split_column_names', None) self.validation_type = kwargs.get('validation_type', None) class VariantNode(msrest.serialization.Model): """VariantNode. :ivar node: :vartype node: ~flow.models.Node :ivar description: :vartype description: str """ _attribute_map = { 'node': {'key': 'node', 'type': 'Node'}, 'description': {'key': 'description', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword node: :paramtype node: ~flow.models.Node :keyword description: :paramtype description: str """ super(VariantNode, self).__init__(**kwargs) self.node = kwargs.get('node', None) self.description = kwargs.get('description', None) class Webhook(msrest.serialization.Model): """Webhook. :ivar webhook_type: The only acceptable values to pass in are None and "AzureDevOps". The default value is None. 
:vartype webhook_type: str :ivar event_type: :vartype event_type: str """ _attribute_map = { 'webhook_type': {'key': 'webhookType', 'type': 'str'}, 'event_type': {'key': 'eventType', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword webhook_type: The only acceptable values to pass in are None and "AzureDevOps". The default value is None. :paramtype webhook_type: str :keyword event_type: :paramtype event_type: str """ super(Webhook, self).__init__(**kwargs) self.webhook_type = kwargs.get('webhook_type', None) self.event_type = kwargs.get('event_type', None) class WebServiceComputeMetaInfo(msrest.serialization.Model): """WebServiceComputeMetaInfo. :ivar node_count: :vartype node_count: int :ivar is_ssl_enabled: :vartype is_ssl_enabled: bool :ivar aks_not_found: :vartype aks_not_found: bool :ivar cluster_purpose: :vartype cluster_purpose: str :ivar public_ip_address: :vartype public_ip_address: str :ivar vm_size: :vartype vm_size: str :ivar location: :vartype location: str :ivar provisioning_state: :vartype provisioning_state: str :ivar state: :vartype state: str :ivar os_type: :vartype os_type: str :ivar id: :vartype id: str :ivar name: :vartype name: str :ivar created_by_studio: :vartype created_by_studio: bool :ivar is_gpu_type: :vartype is_gpu_type: bool :ivar resource_id: :vartype resource_id: str :ivar compute_type: :vartype compute_type: str """ _attribute_map = { 'node_count': {'key': 'nodeCount', 'type': 'int'}, 'is_ssl_enabled': {'key': 'isSslEnabled', 'type': 'bool'}, 'aks_not_found': {'key': 'aksNotFound', 'type': 'bool'}, 'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'}, 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, 'state': {'key': 'state', 'type': 'str'}, 'os_type': {'key': 'osType', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 
'name', 'type': 'str'}, 'created_by_studio': {'key': 'createdByStudio', 'type': 'bool'}, 'is_gpu_type': {'key': 'isGpuType', 'type': 'bool'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword node_count: :paramtype node_count: int :keyword is_ssl_enabled: :paramtype is_ssl_enabled: bool :keyword aks_not_found: :paramtype aks_not_found: bool :keyword cluster_purpose: :paramtype cluster_purpose: str :keyword public_ip_address: :paramtype public_ip_address: str :keyword vm_size: :paramtype vm_size: str :keyword location: :paramtype location: str :keyword provisioning_state: :paramtype provisioning_state: str :keyword state: :paramtype state: str :keyword os_type: :paramtype os_type: str :keyword id: :paramtype id: str :keyword name: :paramtype name: str :keyword created_by_studio: :paramtype created_by_studio: bool :keyword is_gpu_type: :paramtype is_gpu_type: bool :keyword resource_id: :paramtype resource_id: str :keyword compute_type: :paramtype compute_type: str """ super(WebServiceComputeMetaInfo, self).__init__(**kwargs) self.node_count = kwargs.get('node_count', None) self.is_ssl_enabled = kwargs.get('is_ssl_enabled', None) self.aks_not_found = kwargs.get('aks_not_found', None) self.cluster_purpose = kwargs.get('cluster_purpose', None) self.public_ip_address = kwargs.get('public_ip_address', None) self.vm_size = kwargs.get('vm_size', None) self.location = kwargs.get('location', None) self.provisioning_state = kwargs.get('provisioning_state', None) self.state = kwargs.get('state', None) self.os_type = kwargs.get('os_type', None) self.id = kwargs.get('id', None) self.name = kwargs.get('name', None) self.created_by_studio = kwargs.get('created_by_studio', None) self.is_gpu_type = kwargs.get('is_gpu_type', None) self.resource_id = kwargs.get('resource_id', None) self.compute_type = kwargs.get('compute_type', None) class WebServicePort(msrest.serialization.Model): 
"""WebServicePort. :ivar node_id: :vartype node_id: str :ivar port_name: :vartype port_name: str :ivar name: :vartype name: str """ _attribute_map = { 'node_id': {'key': 'nodeId', 'type': 'str'}, 'port_name': {'key': 'portName', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword node_id: :paramtype node_id: str :keyword port_name: :paramtype port_name: str :keyword name: :paramtype name: str """ super(WebServicePort, self).__init__(**kwargs) self.node_id = kwargs.get('node_id', None) self.port_name = kwargs.get('port_name', None) self.name = kwargs.get('name', None) class WorkspaceConnectionSpec(msrest.serialization.Model): """WorkspaceConnectionSpec. :ivar connection_category: Possible values include: "PythonFeed", "ACR", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "AzureDataLakeGen2", "Redis", "ApiKey", "AzureOpenAI", "CognitiveSearch", "CognitiveService", "CustomKeys", "AzureBlob", "AzureOneLake", "CosmosDb", "CosmosDbMongoDbApi", "AzureDataExplorer", "AzureMariaDb", "AzureDatabricksDeltaLake", "AzureSqlMi", "AzureTableStorage", "AmazonRdsForOracle", "AmazonRdsForSqlServer", "AmazonRedshift", "Db2", "Drill", "GoogleBigQuery", "Greenplum", "Hbase", "Hive", "Impala", "Informix", "MariaDb", "MicrosoftAccess", "MySql", "Netezza", "Oracle", "Phoenix", "PostgreSql", "Presto", "SapOpenHub", "SapBw", "SapHana", "SapTable", "Spark", "SqlServer", "Sybase", "Teradata", "Vertica", "Cassandra", "Couchbase", "MongoDbV2", "MongoDbAtlas", "AmazonS3Compatible", "FileServer", "FtpServer", "GoogleCloudStorage", "Hdfs", "OracleCloudStorage", "Sftp", "GenericHttp", "ODataRest", "Odbc", "GenericRest", "AmazonMws", "Concur", "Dynamics", "DynamicsAx", "DynamicsCrm", "GoogleAdWords", "Hubspot", "Jira", "Magento", "Marketo", "Office365", "Eloqua", "Responsys", "OracleServiceCloud", "PayPal", "QuickBooks", "Salesforce", "SalesforceServiceCloud", "SalesforceMarketingCloud", 
"SapCloudForCustomer", "SapEcc", "ServiceNow", "SharePointOnlineList", "Shopify", "Square", "WebTable", "Xero", "Zoho", "GenericContainerRegistry". :vartype connection_category: str or ~flow.models.ConnectionCategory :ivar flow_value_type: Possible values include: "int", "double", "bool", "string", "secret", "prompt_template", "object", "list", "BingConnection", "OpenAIConnection", "AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection", "AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection", "SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection", "function_list", "function_str", "FormRecognizerConnection", "file_path", "image". :vartype flow_value_type: str or ~flow.models.ValueType :ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing", "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM", "Pinecone", "Qdrant", "Weaviate", "FormRecognizer". 
:vartype connection_type: str or ~flow.models.ConnectionType :ivar connection_type_display_name: :vartype connection_type_display_name: str :ivar config_specs: :vartype config_specs: list[~flow.models.ConnectionConfigSpec] :ivar module: :vartype module: str """ _attribute_map = { 'connection_category': {'key': 'connectionCategory', 'type': 'str'}, 'flow_value_type': {'key': 'flowValueType', 'type': 'str'}, 'connection_type': {'key': 'connectionType', 'type': 'str'}, 'connection_type_display_name': {'key': 'connectionTypeDisplayName', 'type': 'str'}, 'config_specs': {'key': 'configSpecs', 'type': '[ConnectionConfigSpec]'}, 'module': {'key': 'module', 'type': 'str'}, } def __init__( self, **kwargs ): """ :keyword connection_category: Possible values include: "PythonFeed", "ACR", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "AzureDataLakeGen2", "Redis", "ApiKey", "AzureOpenAI", "CognitiveSearch", "CognitiveService", "CustomKeys", "AzureBlob", "AzureOneLake", "CosmosDb", "CosmosDbMongoDbApi", "AzureDataExplorer", "AzureMariaDb", "AzureDatabricksDeltaLake", "AzureSqlMi", "AzureTableStorage", "AmazonRdsForOracle", "AmazonRdsForSqlServer", "AmazonRedshift", "Db2", "Drill", "GoogleBigQuery", "Greenplum", "Hbase", "Hive", "Impala", "Informix", "MariaDb", "MicrosoftAccess", "MySql", "Netezza", "Oracle", "Phoenix", "PostgreSql", "Presto", "SapOpenHub", "SapBw", "SapHana", "SapTable", "Spark", "SqlServer", "Sybase", "Teradata", "Vertica", "Cassandra", "Couchbase", "MongoDbV2", "MongoDbAtlas", "AmazonS3Compatible", "FileServer", "FtpServer", "GoogleCloudStorage", "Hdfs", "OracleCloudStorage", "Sftp", "GenericHttp", "ODataRest", "Odbc", "GenericRest", "AmazonMws", "Concur", "Dynamics", "DynamicsAx", "DynamicsCrm", "GoogleAdWords", "Hubspot", "Jira", "Magento", "Marketo", "Office365", "Eloqua", "Responsys", "OracleServiceCloud", "PayPal", "QuickBooks", "Salesforce", "SalesforceServiceCloud", "SalesforceMarketingCloud", 
"SapCloudForCustomer", "SapEcc", "ServiceNow", "SharePointOnlineList", "Shopify", "Square", "WebTable", "Xero", "Zoho", "GenericContainerRegistry". :paramtype connection_category: str or ~flow.models.ConnectionCategory :keyword flow_value_type: Possible values include: "int", "double", "bool", "string", "secret", "prompt_template", "object", "list", "BingConnection", "OpenAIConnection", "AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection", "AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection", "SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection", "function_list", "function_str", "FormRecognizerConnection", "file_path", "image". :paramtype flow_value_type: str or ~flow.models.ValueType :keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing", "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM", "Pinecone", "Qdrant", "Weaviate", "FormRecognizer". :paramtype connection_type: str or ~flow.models.ConnectionType :keyword connection_type_display_name: :paramtype connection_type_display_name: str :keyword config_specs: :paramtype config_specs: list[~flow.models.ConnectionConfigSpec] :keyword module: :paramtype module: str """ super(WorkspaceConnectionSpec, self).__init__(**kwargs) self.connection_category = kwargs.get('connection_category', None) self.flow_value_type = kwargs.get('flow_value_type', None) self.connection_type = kwargs.get('connection_type', None) self.connection_type_display_name = kwargs.get('connection_type_display_name', None) self.config_specs = kwargs.get('config_specs', None) self.module = kwargs.get('module', None)
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/models/_models_py3.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime from typing import Any, Dict, IO, List, Optional, Union from azure.core.exceptions import HttpResponseError import msrest.serialization from ._azure_machine_learning_designer_service_client_enums import * class ACIAdvanceSettings(msrest.serialization.Model): """ACIAdvanceSettings. :ivar container_resource_requirements: :vartype container_resource_requirements: ~flow.models.ContainerResourceRequirements :ivar app_insights_enabled: :vartype app_insights_enabled: bool :ivar ssl_enabled: :vartype ssl_enabled: bool :ivar ssl_certificate: :vartype ssl_certificate: str :ivar ssl_key: :vartype ssl_key: str :ivar c_name: :vartype c_name: str :ivar dns_name_label: :vartype dns_name_label: str """ _attribute_map = { 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'}, 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, 'ssl_enabled': {'key': 'sslEnabled', 'type': 'bool'}, 'ssl_certificate': {'key': 'sslCertificate', 'type': 'str'}, 'ssl_key': {'key': 'sslKey', 'type': 'str'}, 'c_name': {'key': 'cName', 'type': 'str'}, 'dns_name_label': {'key': 'dnsNameLabel', 'type': 'str'}, } def __init__( self, *, container_resource_requirements: Optional["ContainerResourceRequirements"] = None, app_insights_enabled: Optional[bool] = None, ssl_enabled: Optional[bool] = None, ssl_certificate: Optional[str] = None, ssl_key: Optional[str] = None, c_name: Optional[str] = None, dns_name_label: Optional[str] = None, **kwargs ): """ :keyword container_resource_requirements: :paramtype container_resource_requirements: 
~flow.models.ContainerResourceRequirements :keyword app_insights_enabled: :paramtype app_insights_enabled: bool :keyword ssl_enabled: :paramtype ssl_enabled: bool :keyword ssl_certificate: :paramtype ssl_certificate: str :keyword ssl_key: :paramtype ssl_key: str :keyword c_name: :paramtype c_name: str :keyword dns_name_label: :paramtype dns_name_label: str """ super(ACIAdvanceSettings, self).__init__(**kwargs) self.container_resource_requirements = container_resource_requirements self.app_insights_enabled = app_insights_enabled self.ssl_enabled = ssl_enabled self.ssl_certificate = ssl_certificate self.ssl_key = ssl_key self.c_name = c_name self.dns_name_label = dns_name_label class Activate(msrest.serialization.Model): """Activate. :ivar when: :vartype when: str :ivar is_property: Anything. :vartype is_property: any """ _attribute_map = { 'when': {'key': 'when', 'type': 'str'}, 'is_property': {'key': 'is', 'type': 'object'}, } def __init__( self, *, when: Optional[str] = None, is_property: Optional[Any] = None, **kwargs ): """ :keyword when: :paramtype when: str :keyword is_property: Anything. :paramtype is_property: any """ super(Activate, self).__init__(**kwargs) self.when = when self.is_property = is_property class AdditionalErrorInfo(msrest.serialization.Model): """AdditionalErrorInfo. :ivar type: :vartype type: str :ivar info: Anything. :vartype info: any """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'info': {'key': 'info', 'type': 'object'}, } def __init__( self, *, type: Optional[str] = None, info: Optional[Any] = None, **kwargs ): """ :keyword type: :paramtype type: str :keyword info: Anything. :paramtype info: any """ super(AdditionalErrorInfo, self).__init__(**kwargs) self.type = type self.info = info class AdhocTriggerScheduledCommandJobRequest(msrest.serialization.Model): """AdhocTriggerScheduledCommandJobRequest. 
:ivar job_name: :vartype job_name: str :ivar job_display_name: :vartype job_display_name: str :ivar trigger_time_string: :vartype trigger_time_string: str """ _attribute_map = { 'job_name': {'key': 'jobName', 'type': 'str'}, 'job_display_name': {'key': 'jobDisplayName', 'type': 'str'}, 'trigger_time_string': {'key': 'triggerTimeString', 'type': 'str'}, } def __init__( self, *, job_name: Optional[str] = None, job_display_name: Optional[str] = None, trigger_time_string: Optional[str] = None, **kwargs ): """ :keyword job_name: :paramtype job_name: str :keyword job_display_name: :paramtype job_display_name: str :keyword trigger_time_string: :paramtype trigger_time_string: str """ super(AdhocTriggerScheduledCommandJobRequest, self).__init__(**kwargs) self.job_name = job_name self.job_display_name = job_display_name self.trigger_time_string = trigger_time_string class AdhocTriggerScheduledSparkJobRequest(msrest.serialization.Model): """AdhocTriggerScheduledSparkJobRequest. :ivar job_name: :vartype job_name: str :ivar job_display_name: :vartype job_display_name: str :ivar trigger_time_string: :vartype trigger_time_string: str """ _attribute_map = { 'job_name': {'key': 'jobName', 'type': 'str'}, 'job_display_name': {'key': 'jobDisplayName', 'type': 'str'}, 'trigger_time_string': {'key': 'triggerTimeString', 'type': 'str'}, } def __init__( self, *, job_name: Optional[str] = None, job_display_name: Optional[str] = None, trigger_time_string: Optional[str] = None, **kwargs ): """ :keyword job_name: :paramtype job_name: str :keyword job_display_name: :paramtype job_display_name: str :keyword trigger_time_string: :paramtype trigger_time_string: str """ super(AdhocTriggerScheduledSparkJobRequest, self).__init__(**kwargs) self.job_name = job_name self.job_display_name = job_display_name self.trigger_time_string = trigger_time_string class AetherAmlDataset(msrest.serialization.Model): """AetherAmlDataset. 
:ivar registered_data_set_reference: :vartype registered_data_set_reference: ~flow.models.AetherRegisteredDataSetReference :ivar saved_data_set_reference: :vartype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference :ivar additional_transformations: :vartype additional_transformations: str """ _attribute_map = { 'registered_data_set_reference': {'key': 'registeredDataSetReference', 'type': 'AetherRegisteredDataSetReference'}, 'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'AetherSavedDataSetReference'}, 'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'}, } def __init__( self, *, registered_data_set_reference: Optional["AetherRegisteredDataSetReference"] = None, saved_data_set_reference: Optional["AetherSavedDataSetReference"] = None, additional_transformations: Optional[str] = None, **kwargs ): """ :keyword registered_data_set_reference: :paramtype registered_data_set_reference: ~flow.models.AetherRegisteredDataSetReference :keyword saved_data_set_reference: :paramtype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference :keyword additional_transformations: :paramtype additional_transformations: str """ super(AetherAmlDataset, self).__init__(**kwargs) self.registered_data_set_reference = registered_data_set_reference self.saved_data_set_reference = saved_data_set_reference self.additional_transformations = additional_transformations class AetherAmlSparkCloudSetting(msrest.serialization.Model): """AetherAmlSparkCloudSetting. 
:ivar entry: :vartype entry: ~flow.models.AetherEntrySetting :ivar files: :vartype files: list[str] :ivar archives: :vartype archives: list[str] :ivar jars: :vartype jars: list[str] :ivar py_files: :vartype py_files: list[str] :ivar driver_memory: :vartype driver_memory: str :ivar driver_cores: :vartype driver_cores: int :ivar executor_memory: :vartype executor_memory: str :ivar executor_cores: :vartype executor_cores: int :ivar number_executors: :vartype number_executors: int :ivar environment_asset_id: :vartype environment_asset_id: str :ivar environment_variables: Dictionary of :code:`<string>`. :vartype environment_variables: dict[str, str] :ivar inline_environment_definition_string: :vartype inline_environment_definition_string: str :ivar conf: Dictionary of :code:`<string>`. :vartype conf: dict[str, str] :ivar compute: :vartype compute: str :ivar resources: :vartype resources: ~flow.models.AetherResourcesSetting :ivar identity: :vartype identity: ~flow.models.AetherIdentitySetting """ _attribute_map = { 'entry': {'key': 'entry', 'type': 'AetherEntrySetting'}, 'files': {'key': 'files', 'type': '[str]'}, 'archives': {'key': 'archives', 'type': '[str]'}, 'jars': {'key': 'jars', 'type': '[str]'}, 'py_files': {'key': 'pyFiles', 'type': '[str]'}, 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, 'driver_cores': {'key': 'driverCores', 'type': 'int'}, 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, 'executor_cores': {'key': 'executorCores', 'type': 'int'}, 'number_executors': {'key': 'numberExecutors', 'type': 'int'}, 'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'}, 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, 'inline_environment_definition_string': {'key': 'inlineEnvironmentDefinitionString', 'type': 'str'}, 'conf': {'key': 'conf', 'type': '{str}'}, 'compute': {'key': 'compute', 'type': 'str'}, 'resources': {'key': 'resources', 'type': 'AetherResourcesSetting'}, 'identity': {'key': 'identity', 
'type': 'AetherIdentitySetting'}, } def __init__( self, *, entry: Optional["AetherEntrySetting"] = None, files: Optional[List[str]] = None, archives: Optional[List[str]] = None, jars: Optional[List[str]] = None, py_files: Optional[List[str]] = None, driver_memory: Optional[str] = None, driver_cores: Optional[int] = None, executor_memory: Optional[str] = None, executor_cores: Optional[int] = None, number_executors: Optional[int] = None, environment_asset_id: Optional[str] = None, environment_variables: Optional[Dict[str, str]] = None, inline_environment_definition_string: Optional[str] = None, conf: Optional[Dict[str, str]] = None, compute: Optional[str] = None, resources: Optional["AetherResourcesSetting"] = None, identity: Optional["AetherIdentitySetting"] = None, **kwargs ): """ :keyword entry: :paramtype entry: ~flow.models.AetherEntrySetting :keyword files: :paramtype files: list[str] :keyword archives: :paramtype archives: list[str] :keyword jars: :paramtype jars: list[str] :keyword py_files: :paramtype py_files: list[str] :keyword driver_memory: :paramtype driver_memory: str :keyword driver_cores: :paramtype driver_cores: int :keyword executor_memory: :paramtype executor_memory: str :keyword executor_cores: :paramtype executor_cores: int :keyword number_executors: :paramtype number_executors: int :keyword environment_asset_id: :paramtype environment_asset_id: str :keyword environment_variables: Dictionary of :code:`<string>`. :paramtype environment_variables: dict[str, str] :keyword inline_environment_definition_string: :paramtype inline_environment_definition_string: str :keyword conf: Dictionary of :code:`<string>`. 
:paramtype conf: dict[str, str] :keyword compute: :paramtype compute: str :keyword resources: :paramtype resources: ~flow.models.AetherResourcesSetting :keyword identity: :paramtype identity: ~flow.models.AetherIdentitySetting """ super(AetherAmlSparkCloudSetting, self).__init__(**kwargs) self.entry = entry self.files = files self.archives = archives self.jars = jars self.py_files = py_files self.driver_memory = driver_memory self.driver_cores = driver_cores self.executor_memory = executor_memory self.executor_cores = executor_cores self.number_executors = number_executors self.environment_asset_id = environment_asset_id self.environment_variables = environment_variables self.inline_environment_definition_string = inline_environment_definition_string self.conf = conf self.compute = compute self.resources = resources self.identity = identity class AetherAPCloudConfiguration(msrest.serialization.Model): """AetherAPCloudConfiguration. :ivar referenced_ap_module_guid: :vartype referenced_ap_module_guid: str :ivar user_alias: :vartype user_alias: str :ivar aether_module_type: :vartype aether_module_type: str """ _attribute_map = { 'referenced_ap_module_guid': {'key': 'referencedAPModuleGuid', 'type': 'str'}, 'user_alias': {'key': 'userAlias', 'type': 'str'}, 'aether_module_type': {'key': 'aetherModuleType', 'type': 'str'}, } def __init__( self, *, referenced_ap_module_guid: Optional[str] = None, user_alias: Optional[str] = None, aether_module_type: Optional[str] = None, **kwargs ): """ :keyword referenced_ap_module_guid: :paramtype referenced_ap_module_guid: str :keyword user_alias: :paramtype user_alias: str :keyword aether_module_type: :paramtype aether_module_type: str """ super(AetherAPCloudConfiguration, self).__init__(**kwargs) self.referenced_ap_module_guid = referenced_ap_module_guid self.user_alias = user_alias self.aether_module_type = aether_module_type class AetherArgumentAssignment(msrest.serialization.Model): """AetherArgumentAssignment. 
:ivar value_type: Possible values include: "Literal", "Parameter", "Input", "Output", "NestedList", "StringInterpolationList". :vartype value_type: str or ~flow.models.AetherArgumentValueType :ivar value: :vartype value: str :ivar nested_argument_list: :vartype nested_argument_list: list[~flow.models.AetherArgumentAssignment] :ivar string_interpolation_argument_list: :vartype string_interpolation_argument_list: list[~flow.models.AetherArgumentAssignment] """ _attribute_map = { 'value_type': {'key': 'valueType', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, 'nested_argument_list': {'key': 'nestedArgumentList', 'type': '[AetherArgumentAssignment]'}, 'string_interpolation_argument_list': {'key': 'stringInterpolationArgumentList', 'type': '[AetherArgumentAssignment]'}, } def __init__( self, *, value_type: Optional[Union[str, "AetherArgumentValueType"]] = None, value: Optional[str] = None, nested_argument_list: Optional[List["AetherArgumentAssignment"]] = None, string_interpolation_argument_list: Optional[List["AetherArgumentAssignment"]] = None, **kwargs ): """ :keyword value_type: Possible values include: "Literal", "Parameter", "Input", "Output", "NestedList", "StringInterpolationList". :paramtype value_type: str or ~flow.models.AetherArgumentValueType :keyword value: :paramtype value: str :keyword nested_argument_list: :paramtype nested_argument_list: list[~flow.models.AetherArgumentAssignment] :keyword string_interpolation_argument_list: :paramtype string_interpolation_argument_list: list[~flow.models.AetherArgumentAssignment] """ super(AetherArgumentAssignment, self).__init__(**kwargs) self.value_type = value_type self.value = value self.nested_argument_list = nested_argument_list self.string_interpolation_argument_list = string_interpolation_argument_list class AetherAssetDefinition(msrest.serialization.Model): """AetherAssetDefinition. 
    :ivar path:
    :vartype path: str
    :ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
     "MLFlowModel", "TritonModel", "OpenAIModel".
    :vartype type: str or ~flow.models.AetherAssetType
    :ivar asset_id:
    :vartype asset_id: str
    :ivar initial_asset_id:
    :vartype initial_asset_id: str
    :ivar serialized_asset_id:
    :vartype serialized_asset_id: str
    """

    # Wire-format mapping: Python attribute -> (REST payload key, msrest type).
    # NOTE(review): auto-generated by AutoRest (see file header banner) — do not
    # hand-edit keys; they are the service wire contract and regeneration would
    # discard local changes.
    _attribute_map = {
        'path': {'key': 'path', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'asset_id': {'key': 'assetId', 'type': 'str'},
        'initial_asset_id': {'key': 'initialAssetId', 'type': 'str'},
        'serialized_asset_id': {'key': 'serializedAssetId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        path: Optional[str] = None,
        type: Optional[Union[str, "AetherAssetType"]] = None,
        asset_id: Optional[str] = None,
        initial_asset_id: Optional[str] = None,
        serialized_asset_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword path:
        :paramtype path: str
        :keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
         "MLFlowModel", "TritonModel", "OpenAIModel".
        :paramtype type: str or ~flow.models.AetherAssetType
        :keyword asset_id:
        :paramtype asset_id: str
        :keyword initial_asset_id:
        :paramtype initial_asset_id: str
        :keyword serialized_asset_id:
        :paramtype serialized_asset_id: str
        """
        super(AetherAssetDefinition, self).__init__(**kwargs)
        self.path = path
        self.type = type
        self.asset_id = asset_id
        self.initial_asset_id = initial_asset_id
        self.serialized_asset_id = serialized_asset_id


class AetherAssetOutputSettings(msrest.serialization.Model):
    """AetherAssetOutputSettings.

    :ivar path:
    :vartype path: str
    :ivar path_parameter_assignment:
    :vartype path_parameter_assignment: ~flow.models.AetherParameterAssignment
    :ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
     "MLFlowModel", "TritonModel", "OpenAIModel".
    :vartype type: str or ~flow.models.AetherAssetType
    :ivar options: This is a dictionary.
    :vartype options: dict[str, str]
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    """

    # NOTE(review): 'PathParameterAssignment' is PascalCase on the wire, unlike the
    # camelCase keys of its siblings — preserved exactly as the service swagger defines it.
    _attribute_map = {
        'path': {'key': 'path', 'type': 'str'},
        'path_parameter_assignment': {'key': 'PathParameterAssignment', 'type': 'AetherParameterAssignment'},
        'type': {'key': 'type', 'type': 'str'},
        'options': {'key': 'options', 'type': '{str}'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        path: Optional[str] = None,
        path_parameter_assignment: Optional["AetherParameterAssignment"] = None,
        type: Optional[Union[str, "AetherAssetType"]] = None,
        options: Optional[Dict[str, str]] = None,
        data_store_mode: Optional[Union[str, "AetherDataStoreMode"]] = None,
        name: Optional[str] = None,
        version: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword path:
        :paramtype path: str
        :keyword path_parameter_assignment:
        :paramtype path_parameter_assignment: ~flow.models.AetherParameterAssignment
        :keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
         "MLFlowModel", "TritonModel", "OpenAIModel".
        :paramtype type: str or ~flow.models.AetherAssetType
        :keyword options: This is a dictionary.
        :paramtype options: dict[str, str]
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        """
        super(AetherAssetOutputSettings, self).__init__(**kwargs)
        self.path = path
        self.path_parameter_assignment = path_parameter_assignment
        self.type = type
        self.options = options
        self.data_store_mode = data_store_mode
        self.name = name
        self.version = version


class AetherAutoFeaturizeConfiguration(msrest.serialization.Model):
    """AetherAutoFeaturizeConfiguration.

    :ivar featurization_config:
    :vartype featurization_config: ~flow.models.AetherFeaturizationSettings
    """

    _attribute_map = {
        'featurization_config': {'key': 'featurizationConfig', 'type': 'AetherFeaturizationSettings'},
    }

    def __init__(
        self,
        *,
        featurization_config: Optional["AetherFeaturizationSettings"] = None,
        **kwargs
    ):
        """
        :keyword featurization_config:
        :paramtype featurization_config: ~flow.models.AetherFeaturizationSettings
        """
        super(AetherAutoFeaturizeConfiguration, self).__init__(**kwargs)
        self.featurization_config = featurization_config


class AetherAutoMLComponentConfiguration(msrest.serialization.Model):
    """AetherAutoMLComponentConfiguration.
    :ivar auto_train_config:
    :vartype auto_train_config: ~flow.models.AetherAutoTrainConfiguration
    :ivar auto_featurize_config:
    :vartype auto_featurize_config: ~flow.models.AetherAutoFeaturizeConfiguration
    """

    # Wire-format mapping: Python attribute -> (REST payload key, msrest type).
    _attribute_map = {
        'auto_train_config': {'key': 'autoTrainConfig', 'type': 'AetherAutoTrainConfiguration'},
        'auto_featurize_config': {'key': 'autoFeaturizeConfig', 'type': 'AetherAutoFeaturizeConfiguration'},
    }

    def __init__(
        self,
        *,
        auto_train_config: Optional["AetherAutoTrainConfiguration"] = None,
        auto_featurize_config: Optional["AetherAutoFeaturizeConfiguration"] = None,
        **kwargs
    ):
        """
        :keyword auto_train_config:
        :paramtype auto_train_config: ~flow.models.AetherAutoTrainConfiguration
        :keyword auto_featurize_config:
        :paramtype auto_featurize_config: ~flow.models.AetherAutoFeaturizeConfiguration
        """
        super(AetherAutoMLComponentConfiguration, self).__init__(**kwargs)
        self.auto_train_config = auto_train_config
        self.auto_featurize_config = auto_featurize_config


class AetherAutoTrainConfiguration(msrest.serialization.Model):
    """AetherAutoTrainConfiguration.

    :ivar general_settings:
    :vartype general_settings: ~flow.models.AetherGeneralSettings
    :ivar limit_settings:
    :vartype limit_settings: ~flow.models.AetherLimitSettings
    :ivar data_settings:
    :vartype data_settings: ~flow.models.AetherDataSettings
    :ivar forecasting_settings:
    :vartype forecasting_settings: ~flow.models.AetherForecastingSettings
    :ivar training_settings:
    :vartype training_settings: ~flow.models.AetherTrainingSettings
    :ivar sweep_settings:
    :vartype sweep_settings: ~flow.models.AetherSweepSettings
    :ivar image_model_settings: Dictionary of :code:`<any>`.
    :vartype image_model_settings: dict[str, any]
    :ivar properties: Dictionary of :code:`<string>`.
    :vartype properties: dict[str, str]
    :ivar compute_configuration:
    :vartype compute_configuration: ~flow.models.AetherComputeConfiguration
    :ivar resource_configurtion:
    :vartype resource_configurtion: ~flow.models.AetherResourceConfiguration
    :ivar environment_id:
    :vartype environment_id: str
    :ivar environment_variables: Dictionary of :code:`<string>`.
    :vartype environment_variables: dict[str, str]
    """

    # NOTE(review): 'resource_configurtion' / 'resourceConfigurtion' is a typo in the
    # service contract itself (generated from the swagger); it must NOT be "fixed"
    # locally or serialization would break against the real payload key.
    _attribute_map = {
        'general_settings': {'key': 'generalSettings', 'type': 'AetherGeneralSettings'},
        'limit_settings': {'key': 'limitSettings', 'type': 'AetherLimitSettings'},
        'data_settings': {'key': 'dataSettings', 'type': 'AetherDataSettings'},
        'forecasting_settings': {'key': 'forecastingSettings', 'type': 'AetherForecastingSettings'},
        'training_settings': {'key': 'trainingSettings', 'type': 'AetherTrainingSettings'},
        'sweep_settings': {'key': 'sweepSettings', 'type': 'AetherSweepSettings'},
        'image_model_settings': {'key': 'imageModelSettings', 'type': '{object}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'compute_configuration': {'key': 'computeConfiguration', 'type': 'AetherComputeConfiguration'},
        'resource_configurtion': {'key': 'resourceConfigurtion', 'type': 'AetherResourceConfiguration'},
        'environment_id': {'key': 'environmentId', 'type': 'str'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        general_settings: Optional["AetherGeneralSettings"] = None,
        limit_settings: Optional["AetherLimitSettings"] = None,
        data_settings: Optional["AetherDataSettings"] = None,
        forecasting_settings: Optional["AetherForecastingSettings"] = None,
        training_settings: Optional["AetherTrainingSettings"] = None,
        sweep_settings: Optional["AetherSweepSettings"] = None,
        image_model_settings: Optional[Dict[str, Any]] = None,
        properties: Optional[Dict[str, str]] = None,
        compute_configuration: Optional["AetherComputeConfiguration"] = None,
        resource_configurtion: Optional["AetherResourceConfiguration"] = None,
        environment_id: Optional[str] = None,
        environment_variables: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword general_settings:
        :paramtype general_settings: ~flow.models.AetherGeneralSettings
        :keyword limit_settings:
        :paramtype limit_settings: ~flow.models.AetherLimitSettings
        :keyword data_settings:
        :paramtype data_settings: ~flow.models.AetherDataSettings
        :keyword forecasting_settings:
        :paramtype forecasting_settings: ~flow.models.AetherForecastingSettings
        :keyword training_settings:
        :paramtype training_settings: ~flow.models.AetherTrainingSettings
        :keyword sweep_settings:
        :paramtype sweep_settings: ~flow.models.AetherSweepSettings
        :keyword image_model_settings: Dictionary of :code:`<any>`.
        :paramtype image_model_settings: dict[str, any]
        :keyword properties: Dictionary of :code:`<string>`.
        :paramtype properties: dict[str, str]
        :keyword compute_configuration:
        :paramtype compute_configuration: ~flow.models.AetherComputeConfiguration
        :keyword resource_configurtion:
        :paramtype resource_configurtion: ~flow.models.AetherResourceConfiguration
        :keyword environment_id:
        :paramtype environment_id: str
        :keyword environment_variables: Dictionary of :code:`<string>`.
        :paramtype environment_variables: dict[str, str]
        """
        super(AetherAutoTrainConfiguration, self).__init__(**kwargs)
        self.general_settings = general_settings
        self.limit_settings = limit_settings
        self.data_settings = data_settings
        self.forecasting_settings = forecasting_settings
        self.training_settings = training_settings
        self.sweep_settings = sweep_settings
        self.image_model_settings = image_model_settings
        self.properties = properties
        self.compute_configuration = compute_configuration
        self.resource_configurtion = resource_configurtion
        self.environment_id = environment_id
        self.environment_variables = environment_variables


class AetherAzureBlobReference(msrest.serialization.Model):
    """AetherAzureBlobReference.
    :ivar container:
    :vartype container: str
    :ivar sas_token:
    :vartype sas_token: str
    :ivar uri:
    :vartype uri: str
    :ivar account:
    :vartype account: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar path_type: Possible values include: "Unknown", "File", "Folder".
    :vartype path_type: str or ~flow.models.AetherFileBasedPathType
    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    """

    # Wire-format mapping: Python attribute -> (REST payload key, msrest type).
    _attribute_map = {
        'container': {'key': 'container', 'type': 'str'},
        'sas_token': {'key': 'sasToken', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
        'account': {'key': 'account', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'path_type': {'key': 'pathType', 'type': 'str'},
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        container: Optional[str] = None,
        sas_token: Optional[str] = None,
        uri: Optional[str] = None,
        account: Optional[str] = None,
        relative_path: Optional[str] = None,
        path_type: Optional[Union[str, "AetherFileBasedPathType"]] = None,
        aml_data_store_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword container:
        :paramtype container: str
        :keyword sas_token:
        :paramtype sas_token: str
        :keyword uri:
        :paramtype uri: str
        :keyword account:
        :paramtype account: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword path_type: Possible values include: "Unknown", "File", "Folder".
        :paramtype path_type: str or ~flow.models.AetherFileBasedPathType
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        """
        super(AetherAzureBlobReference, self).__init__(**kwargs)
        self.container = container
        self.sas_token = sas_token
        self.uri = uri
        self.account = account
        self.relative_path = relative_path
        self.path_type = path_type
        self.aml_data_store_name = aml_data_store_name


class AetherAzureDatabaseReference(msrest.serialization.Model):
    """AetherAzureDatabaseReference.

    :ivar server_uri:
    :vartype server_uri: str
    :ivar database_name:
    :vartype database_name: str
    :ivar table_name:
    :vartype table_name: str
    :ivar sql_query:
    :vartype sql_query: str
    :ivar stored_procedure_name:
    :vartype stored_procedure_name: str
    :ivar stored_procedure_parameters:
    :vartype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter]
    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    """

    _attribute_map = {
        'server_uri': {'key': 'serverUri', 'type': 'str'},
        'database_name': {'key': 'databaseName', 'type': 'str'},
        'table_name': {'key': 'tableName', 'type': 'str'},
        'sql_query': {'key': 'sqlQuery', 'type': 'str'},
        'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'},
        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[AetherStoredProcedureParameter]'},
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        server_uri: Optional[str] = None,
        database_name: Optional[str] = None,
        table_name: Optional[str] = None,
        sql_query: Optional[str] = None,
        stored_procedure_name: Optional[str] = None,
        stored_procedure_parameters: Optional[List["AetherStoredProcedureParameter"]] = None,
        aml_data_store_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword server_uri:
        :paramtype server_uri: str
        :keyword database_name:
        :paramtype database_name: str
        :keyword table_name:
        :paramtype table_name: str
        :keyword sql_query:
        :paramtype sql_query: str
        :keyword stored_procedure_name:
        :paramtype stored_procedure_name: str
        :keyword stored_procedure_parameters:
        :paramtype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter]
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        """
        super(AetherAzureDatabaseReference, self).__init__(**kwargs)
        self.server_uri = server_uri
        self.database_name = database_name
        self.table_name = table_name
        self.sql_query = sql_query
        self.stored_procedure_name = stored_procedure_name
        self.stored_procedure_parameters = stored_procedure_parameters
        self.aml_data_store_name = aml_data_store_name


class AetherAzureDataLakeGen2Reference(msrest.serialization.Model):
    """AetherAzureDataLakeGen2Reference.

    :ivar file_system_name:
    :vartype file_system_name: str
    :ivar uri:
    :vartype uri: str
    :ivar account:
    :vartype account: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar path_type: Possible values include: "Unknown", "File", "Folder".
    :vartype path_type: str or ~flow.models.AetherFileBasedPathType
    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    """

    _attribute_map = {
        'file_system_name': {'key': 'fileSystemName', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
        'account': {'key': 'account', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'path_type': {'key': 'pathType', 'type': 'str'},
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        file_system_name: Optional[str] = None,
        uri: Optional[str] = None,
        account: Optional[str] = None,
        relative_path: Optional[str] = None,
        path_type: Optional[Union[str, "AetherFileBasedPathType"]] = None,
        aml_data_store_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword file_system_name:
        :paramtype file_system_name: str
        :keyword uri:
        :paramtype uri: str
        :keyword account:
        :paramtype account: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword path_type: Possible values include: "Unknown", "File", "Folder".
        :paramtype path_type: str or ~flow.models.AetherFileBasedPathType
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        """
        super(AetherAzureDataLakeGen2Reference, self).__init__(**kwargs)
        self.file_system_name = file_system_name
        self.uri = uri
        self.account = account
        self.relative_path = relative_path
        self.path_type = path_type
        self.aml_data_store_name = aml_data_store_name


class AetherAzureDataLakeReference(msrest.serialization.Model):
    """AetherAzureDataLakeReference.
    :ivar tenant:
    :vartype tenant: str
    :ivar subscription:
    :vartype subscription: str
    :ivar resource_group:
    :vartype resource_group: str
    :ivar data_lake_uri:
    :vartype data_lake_uri: str
    :ivar uri:
    :vartype uri: str
    :ivar account:
    :vartype account: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar path_type: Possible values include: "Unknown", "File", "Folder".
    :vartype path_type: str or ~flow.models.AetherFileBasedPathType
    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    """

    # Wire-format mapping: Python attribute -> (REST payload key, msrest type).
    _attribute_map = {
        'tenant': {'key': 'tenant', 'type': 'str'},
        'subscription': {'key': 'subscription', 'type': 'str'},
        'resource_group': {'key': 'resourceGroup', 'type': 'str'},
        'data_lake_uri': {'key': 'dataLakeUri', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
        'account': {'key': 'account', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'path_type': {'key': 'pathType', 'type': 'str'},
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        tenant: Optional[str] = None,
        subscription: Optional[str] = None,
        resource_group: Optional[str] = None,
        data_lake_uri: Optional[str] = None,
        uri: Optional[str] = None,
        account: Optional[str] = None,
        relative_path: Optional[str] = None,
        path_type: Optional[Union[str, "AetherFileBasedPathType"]] = None,
        aml_data_store_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword tenant:
        :paramtype tenant: str
        :keyword subscription:
        :paramtype subscription: str
        :keyword resource_group:
        :paramtype resource_group: str
        :keyword data_lake_uri:
        :paramtype data_lake_uri: str
        :keyword uri:
        :paramtype uri: str
        :keyword account:
        :paramtype account: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword path_type: Possible values include: "Unknown", "File", "Folder".
        :paramtype path_type: str or ~flow.models.AetherFileBasedPathType
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        """
        super(AetherAzureDataLakeReference, self).__init__(**kwargs)
        self.tenant = tenant
        self.subscription = subscription
        self.resource_group = resource_group
        self.data_lake_uri = data_lake_uri
        self.uri = uri
        self.account = account
        self.relative_path = relative_path
        self.path_type = path_type
        self.aml_data_store_name = aml_data_store_name


class AetherAzureFilesReference(msrest.serialization.Model):
    """AetherAzureFilesReference.

    :ivar share:
    :vartype share: str
    :ivar uri:
    :vartype uri: str
    :ivar account:
    :vartype account: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar path_type: Possible values include: "Unknown", "File", "Folder".
    :vartype path_type: str or ~flow.models.AetherFileBasedPathType
    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    """

    _attribute_map = {
        'share': {'key': 'share', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
        'account': {'key': 'account', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'path_type': {'key': 'pathType', 'type': 'str'},
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        share: Optional[str] = None,
        uri: Optional[str] = None,
        account: Optional[str] = None,
        relative_path: Optional[str] = None,
        path_type: Optional[Union[str, "AetherFileBasedPathType"]] = None,
        aml_data_store_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword share:
        :paramtype share: str
        :keyword uri:
        :paramtype uri: str
        :keyword account:
        :paramtype account: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword path_type: Possible values include: "Unknown", "File", "Folder".
        :paramtype path_type: str or ~flow.models.AetherFileBasedPathType
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        """
        super(AetherAzureFilesReference, self).__init__(**kwargs)
        self.share = share
        self.uri = uri
        self.account = account
        self.relative_path = relative_path
        self.path_type = path_type
        self.aml_data_store_name = aml_data_store_name


class AetherBatchAiComputeInfo(msrest.serialization.Model):
    """AetherBatchAiComputeInfo.

    :ivar batch_ai_subscription_id:
    :vartype batch_ai_subscription_id: str
    :ivar batch_ai_resource_group:
    :vartype batch_ai_resource_group: str
    :ivar batch_ai_workspace_name:
    :vartype batch_ai_workspace_name: str
    :ivar cluster_name:
    :vartype cluster_name: str
    :ivar native_shared_directory:
    :vartype native_shared_directory: str
    """

    _attribute_map = {
        'batch_ai_subscription_id': {'key': 'batchAiSubscriptionId', 'type': 'str'},
        'batch_ai_resource_group': {'key': 'batchAiResourceGroup', 'type': 'str'},
        'batch_ai_workspace_name': {'key': 'batchAiWorkspaceName', 'type': 'str'},
        'cluster_name': {'key': 'clusterName', 'type': 'str'},
        'native_shared_directory': {'key': 'nativeSharedDirectory', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        batch_ai_subscription_id: Optional[str] = None,
        batch_ai_resource_group: Optional[str] = None,
        batch_ai_workspace_name: Optional[str] = None,
        cluster_name: Optional[str] = None,
        native_shared_directory: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword batch_ai_subscription_id:
        :paramtype batch_ai_subscription_id: str
        :keyword batch_ai_resource_group:
        :paramtype batch_ai_resource_group: str
        :keyword batch_ai_workspace_name:
        :paramtype batch_ai_workspace_name: str
        :keyword cluster_name:
        :paramtype cluster_name: str
        :keyword native_shared_directory:
        :paramtype native_shared_directory: str
        """
        super(AetherBatchAiComputeInfo, self).__init__(**kwargs)
        self.batch_ai_subscription_id = batch_ai_subscription_id
        self.batch_ai_resource_group = batch_ai_resource_group
        self.batch_ai_workspace_name = batch_ai_workspace_name
        self.cluster_name = cluster_name
        self.native_shared_directory = native_shared_directory


class AetherBuildArtifactInfo(msrest.serialization.Model):
    """AetherBuildArtifactInfo.

    :ivar type: Possible values include: "CloudBuild", "Vso", "VsoGit".
    :vartype type: str or ~flow.models.AetherBuildSourceType
    :ivar cloud_build_drop_path_info:
    :vartype cloud_build_drop_path_info: ~flow.models.AetherCloudBuildDropPathInfo
    :ivar vso_build_artifact_info:
    :vartype vso_build_artifact_info: ~flow.models.AetherVsoBuildArtifactInfo
    """

    # Wire-format mapping: Python attribute -> (REST payload key, msrest type).
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'cloud_build_drop_path_info': {'key': 'cloudBuildDropPathInfo', 'type': 'AetherCloudBuildDropPathInfo'},
        'vso_build_artifact_info': {'key': 'vsoBuildArtifactInfo', 'type': 'AetherVsoBuildArtifactInfo'},
    }

    def __init__(
        self,
        *,
        type: Optional[Union[str, "AetherBuildSourceType"]] = None,
        cloud_build_drop_path_info: Optional["AetherCloudBuildDropPathInfo"] = None,
        vso_build_artifact_info: Optional["AetherVsoBuildArtifactInfo"] = None,
        **kwargs
    ):
        """
        :keyword type: Possible values include: "CloudBuild", "Vso", "VsoGit".
        :paramtype type: str or ~flow.models.AetherBuildSourceType
        :keyword cloud_build_drop_path_info:
        :paramtype cloud_build_drop_path_info: ~flow.models.AetherCloudBuildDropPathInfo
        :keyword vso_build_artifact_info:
        :paramtype vso_build_artifact_info: ~flow.models.AetherVsoBuildArtifactInfo
        """
        super(AetherBuildArtifactInfo, self).__init__(**kwargs)
        self.type = type
        self.cloud_build_drop_path_info = cloud_build_drop_path_info
        self.vso_build_artifact_info = vso_build_artifact_info


class AetherCloudBuildDropPathInfo(msrest.serialization.Model):
    """AetherCloudBuildDropPathInfo.

    :ivar build_info:
    :vartype build_info: ~flow.models.AetherCloudBuildInfo
    :ivar root:
    :vartype root: str
    """

    _attribute_map = {
        'build_info': {'key': 'buildInfo', 'type': 'AetherCloudBuildInfo'},
        'root': {'key': 'root', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        build_info: Optional["AetherCloudBuildInfo"] = None,
        root: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword build_info:
        :paramtype build_info: ~flow.models.AetherCloudBuildInfo
        :keyword root:
        :paramtype root: str
        """
        super(AetherCloudBuildDropPathInfo, self).__init__(**kwargs)
        self.build_info = build_info
        self.root = root


class AetherCloudBuildInfo(msrest.serialization.Model):
    """AetherCloudBuildInfo.

    :ivar queue_info:
    :vartype queue_info: ~flow.models.AetherCloudBuildQueueInfo
    :ivar build_id:
    :vartype build_id: str
    :ivar drop_url:
    :vartype drop_url: str
    """

    _attribute_map = {
        'queue_info': {'key': 'queueInfo', 'type': 'AetherCloudBuildQueueInfo'},
        'build_id': {'key': 'buildId', 'type': 'str'},
        'drop_url': {'key': 'dropUrl', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        queue_info: Optional["AetherCloudBuildQueueInfo"] = None,
        build_id: Optional[str] = None,
        drop_url: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword queue_info:
        :paramtype queue_info: ~flow.models.AetherCloudBuildQueueInfo
        :keyword build_id:
        :paramtype build_id: str
        :keyword drop_url:
        :paramtype drop_url: str
        """
        super(AetherCloudBuildInfo, self).__init__(**kwargs)
        self.queue_info = queue_info
        self.build_id = build_id
        self.drop_url = drop_url


class AetherCloudBuildQueueInfo(msrest.serialization.Model):
    """AetherCloudBuildQueueInfo.

    :ivar build_queue:
    :vartype build_queue: str
    :ivar build_role:
    :vartype build_role: str
    """

    _attribute_map = {
        'build_queue': {'key': 'buildQueue', 'type': 'str'},
        'build_role': {'key': 'buildRole', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        build_queue: Optional[str] = None,
        build_role: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword build_queue:
        :paramtype build_queue: str
        :keyword build_role:
        :paramtype build_role: str
        """
        super(AetherCloudBuildQueueInfo, self).__init__(**kwargs)
        self.build_queue = build_queue
        self.build_role = build_role


class AetherCloudPrioritySetting(msrest.serialization.Model):
    """AetherCloudPrioritySetting.

    :ivar scope_priority:
    :vartype scope_priority: ~flow.models.AetherPriorityConfiguration
    :ivar aml_compute_priority:
    :vartype aml_compute_priority: ~flow.models.AetherPriorityConfiguration
    :ivar itp_priority:
    :vartype itp_priority: ~flow.models.AetherPriorityConfiguration
    :ivar singularity_priority:
    :vartype singularity_priority: ~flow.models.AetherPriorityConfiguration
    """

    # NOTE(review): 'AmlComputePriority', 'ItpPriority' and 'SingularityPriority' are
    # PascalCase on the wire while 'scopePriority' is camelCase — preserved as the
    # service swagger defines them.
    _attribute_map = {
        'scope_priority': {'key': 'scopePriority', 'type': 'AetherPriorityConfiguration'},
        'aml_compute_priority': {'key': 'AmlComputePriority', 'type': 'AetherPriorityConfiguration'},
        'itp_priority': {'key': 'ItpPriority', 'type': 'AetherPriorityConfiguration'},
        'singularity_priority': {'key': 'SingularityPriority', 'type': 'AetherPriorityConfiguration'},
    }

    def __init__(
        self,
        *,
        scope_priority: Optional["AetherPriorityConfiguration"] = None,
        aml_compute_priority: Optional["AetherPriorityConfiguration"] = None,
        itp_priority: Optional["AetherPriorityConfiguration"] = None,
        singularity_priority: Optional["AetherPriorityConfiguration"] = None,
        **kwargs
    ):
        """
        :keyword scope_priority:
        :paramtype scope_priority: ~flow.models.AetherPriorityConfiguration
        :keyword aml_compute_priority:
        :paramtype aml_compute_priority: ~flow.models.AetherPriorityConfiguration
        :keyword itp_priority:
        :paramtype itp_priority: ~flow.models.AetherPriorityConfiguration
        :keyword
         singularity_priority:
        :paramtype singularity_priority: ~flow.models.AetherPriorityConfiguration
        """
        super(AetherCloudPrioritySetting, self).__init__(**kwargs)
        self.scope_priority = scope_priority
        self.aml_compute_priority = aml_compute_priority
        self.itp_priority = itp_priority
        self.singularity_priority = singularity_priority


class AetherCloudSettings(msrest.serialization.Model):
    """AetherCloudSettings.

    :ivar linked_settings:
    :vartype linked_settings: list[~flow.models.AetherParameterAssignment]
    :ivar priority_config:
    :vartype priority_config: ~flow.models.AetherPriorityConfiguration
    :ivar hdi_run_config:
    :vartype hdi_run_config: ~flow.models.AetherHdiRunConfiguration
    :ivar sub_graph_config:
    :vartype sub_graph_config: ~flow.models.AetherSubGraphConfiguration
    :ivar auto_ml_component_config:
    :vartype auto_ml_component_config: ~flow.models.AetherAutoMLComponentConfiguration
    :ivar ap_cloud_config:
    :vartype ap_cloud_config: ~flow.models.AetherAPCloudConfiguration
    :ivar scope_cloud_config:
    :vartype scope_cloud_config: ~flow.models.AetherScopeCloudConfiguration
    :ivar es_cloud_config:
    :vartype es_cloud_config: ~flow.models.AetherEsCloudConfiguration
    :ivar data_transfer_cloud_config:
    :vartype data_transfer_cloud_config: ~flow.models.AetherDataTransferCloudConfiguration
    :ivar aml_spark_cloud_setting:
    :vartype aml_spark_cloud_setting: ~flow.models.AetherAmlSparkCloudSetting
    :ivar data_transfer_v2_cloud_setting:
    :vartype data_transfer_v2_cloud_setting: ~flow.models.AetherDataTransferV2CloudSetting
    """

    # Wire-format mapping: Python attribute -> (REST payload key, msrest type).
    _attribute_map = {
        'linked_settings': {'key': 'linkedSettings', 'type': '[AetherParameterAssignment]'},
        'priority_config': {'key': 'priorityConfig', 'type': 'AetherPriorityConfiguration'},
        'hdi_run_config': {'key': 'hdiRunConfig', 'type': 'AetherHdiRunConfiguration'},
        'sub_graph_config': {'key': 'subGraphConfig', 'type': 'AetherSubGraphConfiguration'},
        'auto_ml_component_config': {'key': 'autoMLComponentConfig', 'type': 'AetherAutoMLComponentConfiguration'},
        'ap_cloud_config': {'key': 'apCloudConfig', 'type': 'AetherAPCloudConfiguration'},
        'scope_cloud_config': {'key': 'scopeCloudConfig', 'type': 'AetherScopeCloudConfiguration'},
        'es_cloud_config': {'key': 'esCloudConfig', 'type': 'AetherEsCloudConfiguration'},
        'data_transfer_cloud_config': {'key': 'dataTransferCloudConfig', 'type': 'AetherDataTransferCloudConfiguration'},
        'aml_spark_cloud_setting': {'key': 'amlSparkCloudSetting', 'type': 'AetherAmlSparkCloudSetting'},
        'data_transfer_v2_cloud_setting': {'key': 'dataTransferV2CloudSetting', 'type': 'AetherDataTransferV2CloudSetting'},
    }

    def __init__(
        self,
        *,
        linked_settings: Optional[List["AetherParameterAssignment"]] = None,
        priority_config: Optional["AetherPriorityConfiguration"] = None,
        hdi_run_config: Optional["AetherHdiRunConfiguration"] = None,
        sub_graph_config: Optional["AetherSubGraphConfiguration"] = None,
        auto_ml_component_config: Optional["AetherAutoMLComponentConfiguration"] = None,
        ap_cloud_config: Optional["AetherAPCloudConfiguration"] = None,
        scope_cloud_config: Optional["AetherScopeCloudConfiguration"] = None,
        es_cloud_config: Optional["AetherEsCloudConfiguration"] = None,
        data_transfer_cloud_config: Optional["AetherDataTransferCloudConfiguration"] = None,
        aml_spark_cloud_setting: Optional["AetherAmlSparkCloudSetting"] = None,
        data_transfer_v2_cloud_setting: Optional["AetherDataTransferV2CloudSetting"] = None,
        **kwargs
    ):
        """
        :keyword linked_settings:
        :paramtype linked_settings: list[~flow.models.AetherParameterAssignment]
        :keyword priority_config:
        :paramtype priority_config: ~flow.models.AetherPriorityConfiguration
        :keyword hdi_run_config:
        :paramtype hdi_run_config: ~flow.models.AetherHdiRunConfiguration
        :keyword sub_graph_config:
        :paramtype sub_graph_config: ~flow.models.AetherSubGraphConfiguration
        :keyword auto_ml_component_config:
        :paramtype auto_ml_component_config: ~flow.models.AetherAutoMLComponentConfiguration
        :keyword ap_cloud_config:
        :paramtype ap_cloud_config: ~flow.models.AetherAPCloudConfiguration
        :keyword scope_cloud_config:
        :paramtype scope_cloud_config: ~flow.models.AetherScopeCloudConfiguration
        :keyword es_cloud_config:
        :paramtype es_cloud_config: ~flow.models.AetherEsCloudConfiguration
        :keyword data_transfer_cloud_config:
        :paramtype data_transfer_cloud_config: ~flow.models.AetherDataTransferCloudConfiguration
        :keyword aml_spark_cloud_setting:
        :paramtype aml_spark_cloud_setting: ~flow.models.AetherAmlSparkCloudSetting
        :keyword data_transfer_v2_cloud_setting:
        :paramtype data_transfer_v2_cloud_setting: ~flow.models.AetherDataTransferV2CloudSetting
        """
        super(AetherCloudSettings, self).__init__(**kwargs)
        self.linked_settings = linked_settings
        self.priority_config = priority_config
        self.hdi_run_config = hdi_run_config
        self.sub_graph_config = sub_graph_config
        self.auto_ml_component_config = auto_ml_component_config
        self.ap_cloud_config = ap_cloud_config
        self.scope_cloud_config = scope_cloud_config
        self.es_cloud_config = es_cloud_config
        self.data_transfer_cloud_config = data_transfer_cloud_config
        self.aml_spark_cloud_setting = aml_spark_cloud_setting
        self.data_transfer_v2_cloud_setting = data_transfer_v2_cloud_setting


class AetherColumnTransformer(msrest.serialization.Model):
    """AetherColumnTransformer.

    :ivar fields:
    :vartype fields: list[str]
    :ivar parameters: Anything.
    :vartype parameters: any
    """

    _attribute_map = {
        'fields': {'key': 'fields', 'type': '[str]'},
        'parameters': {'key': 'parameters', 'type': 'object'},
    }

    def __init__(
        self,
        *,
        fields: Optional[List[str]] = None,
        parameters: Optional[Any] = None,
        **kwargs
    ):
        """
        :keyword fields:
        :paramtype fields: list[str]
        :keyword parameters: Anything.
        :paramtype parameters: any
        """
        super(AetherColumnTransformer, self).__init__(**kwargs)
        self.fields = fields
        self.parameters = parameters


class AetherComputeConfiguration(msrest.serialization.Model):
    """AetherComputeConfiguration.
    :ivar target:
    :vartype target: str
    :ivar instance_count:
    :vartype instance_count: int
    :ivar is_local:
    :vartype is_local: bool
    :ivar location:
    :vartype location: str
    :ivar is_clusterless:
    :vartype is_clusterless: bool
    :ivar instance_type:
    :vartype instance_type: str
    :ivar properties: Dictionary of :code:`<any>`.
    :vartype properties: dict[str, any]
    :ivar is_preemptable:
    :vartype is_preemptable: bool
    """

    # Wire-format mapping: Python attribute -> (REST payload key, msrest type).
    _attribute_map = {
        'target': {'key': 'target', 'type': 'str'},
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
        'is_local': {'key': 'isLocal', 'type': 'bool'},
        'location': {'key': 'location', 'type': 'str'},
        'is_clusterless': {'key': 'isClusterless', 'type': 'bool'},
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
        'is_preemptable': {'key': 'isPreemptable', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        target: Optional[str] = None,
        instance_count: Optional[int] = None,
        is_local: Optional[bool] = None,
        location: Optional[str] = None,
        is_clusterless: Optional[bool] = None,
        instance_type: Optional[str] = None,
        properties: Optional[Dict[str, Any]] = None,
        is_preemptable: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword target:
        :paramtype target: str
        :keyword instance_count:
        :paramtype instance_count: int
        :keyword is_local:
        :paramtype is_local: bool
        :keyword location:
        :paramtype location: str
        :keyword is_clusterless:
        :paramtype is_clusterless: bool
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword properties: Dictionary of :code:`<any>`.
        :paramtype properties: dict[str, any]
        :keyword is_preemptable:
        :paramtype is_preemptable: bool
        """
        super(AetherComputeConfiguration, self).__init__(**kwargs)
        self.target = target
        self.instance_count = instance_count
        self.is_local = is_local
        self.location = location
        self.is_clusterless = is_clusterless
        self.instance_type = instance_type
        self.properties = properties
        self.is_preemptable = is_preemptable


class AetherComputeSetting(msrest.serialization.Model):
    """AetherComputeSetting.

    :ivar name:
    :vartype name: str
    :ivar compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker",
     "Databricks", "Aisc".
    :vartype compute_type: str or ~flow.models.AetherComputeType
    :ivar batch_ai_compute_info:
    :vartype batch_ai_compute_info: ~flow.models.AetherBatchAiComputeInfo
    :ivar remote_docker_compute_info:
    :vartype remote_docker_compute_info: ~flow.models.AetherRemoteDockerComputeInfo
    :ivar hdi_cluster_compute_info:
    :vartype hdi_cluster_compute_info: ~flow.models.AetherHdiClusterComputeInfo
    :ivar mlc_compute_info:
    :vartype mlc_compute_info: ~flow.models.AetherMlcComputeInfo
    :ivar databricks_compute_info:
    :vartype databricks_compute_info: ~flow.models.AetherDatabricksComputeInfo
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'batch_ai_compute_info': {'key': 'batchAiComputeInfo', 'type': 'AetherBatchAiComputeInfo'},
        'remote_docker_compute_info': {'key': 'remoteDockerComputeInfo', 'type': 'AetherRemoteDockerComputeInfo'},
        'hdi_cluster_compute_info': {'key': 'hdiClusterComputeInfo', 'type': 'AetherHdiClusterComputeInfo'},
        'mlc_compute_info': {'key': 'mlcComputeInfo', 'type': 'AetherMlcComputeInfo'},
        'databricks_compute_info': {'key': 'databricksComputeInfo', 'type': 'AetherDatabricksComputeInfo'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        compute_type: Optional[Union[str, "AetherComputeType"]] = None,
        batch_ai_compute_info: Optional["AetherBatchAiComputeInfo"] = None,
        remote_docker_compute_info: Optional["AetherRemoteDockerComputeInfo"] = None,
        hdi_cluster_compute_info: Optional["AetherHdiClusterComputeInfo"] = None,
        mlc_compute_info: Optional["AetherMlcComputeInfo"] = None,
        databricks_compute_info: Optional["AetherDatabricksComputeInfo"] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster",
         "RemoteDocker", "Databricks", "Aisc".
        :paramtype compute_type: str or ~flow.models.AetherComputeType
        :keyword batch_ai_compute_info:
        :paramtype batch_ai_compute_info: ~flow.models.AetherBatchAiComputeInfo
        :keyword remote_docker_compute_info:
        :paramtype remote_docker_compute_info: ~flow.models.AetherRemoteDockerComputeInfo
        :keyword hdi_cluster_compute_info:
        :paramtype hdi_cluster_compute_info: ~flow.models.AetherHdiClusterComputeInfo
        :keyword mlc_compute_info:
        :paramtype mlc_compute_info: ~flow.models.AetherMlcComputeInfo
        :keyword databricks_compute_info:
        :paramtype databricks_compute_info: ~flow.models.AetherDatabricksComputeInfo
        """
        super(AetherComputeSetting, self).__init__(**kwargs)
        self.name = name
        self.compute_type = compute_type
        self.batch_ai_compute_info = batch_ai_compute_info
        self.remote_docker_compute_info = remote_docker_compute_info
        self.hdi_cluster_compute_info = hdi_cluster_compute_info
        self.mlc_compute_info = mlc_compute_info
        self.databricks_compute_info = databricks_compute_info


class AetherControlInput(msrest.serialization.Model):
    """AetherControlInput.

    :ivar name:
    :vartype name: str
    :ivar default_value: Possible values include: "None", "False", "True", "Skipped".
    :vartype default_value: str or ~flow.models.AetherControlInputValue
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'default_value': {'key': 'defaultValue', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        default_value: Optional[Union[str, "AetherControlInputValue"]] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword default_value: Possible values include: "None", "False", "True", "Skipped".
        :paramtype default_value: str or ~flow.models.AetherControlInputValue
        """
        super(AetherControlInput, self).__init__(**kwargs)
        self.name = name
        self.default_value = default_value


class AetherControlOutput(msrest.serialization.Model):
    """AetherControlOutput.

    :ivar name:
    :vartype name: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        """
        super(AetherControlOutput, self).__init__(**kwargs)
        self.name = name


class AetherCopyDataTask(msrest.serialization.Model):
    """AetherCopyDataTask.

    :ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
    :vartype data_copy_mode: str or ~flow.models.AetherDataCopyMode
    """

    # NOTE(review): 'DataCopyMode' is PascalCase on the wire — preserved as the
    # service swagger defines it.
    _attribute_map = {
        'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        data_copy_mode: Optional[Union[str, "AetherDataCopyMode"]] = None,
        **kwargs
    ):
        """
        :keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
        :paramtype data_copy_mode: str or ~flow.models.AetherDataCopyMode
        """
        super(AetherCopyDataTask, self).__init__(**kwargs)
        self.data_copy_mode = data_copy_mode


class AetherCosmosReference(msrest.serialization.Model):
    """AetherCosmosReference.
:ivar cluster: :vartype cluster: str :ivar vc: :vartype vc: str :ivar relative_path: :vartype relative_path: str """ _attribute_map = { 'cluster': {'key': 'cluster', 'type': 'str'}, 'vc': {'key': 'vc', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, } def __init__( self, *, cluster: Optional[str] = None, vc: Optional[str] = None, relative_path: Optional[str] = None, **kwargs ): """ :keyword cluster: :paramtype cluster: str :keyword vc: :paramtype vc: str :keyword relative_path: :paramtype relative_path: str """ super(AetherCosmosReference, self).__init__(**kwargs) self.cluster = cluster self.vc = vc self.relative_path = relative_path class AetherCreatedBy(msrest.serialization.Model): """AetherCreatedBy. :ivar user_object_id: :vartype user_object_id: str :ivar user_tenant_id: :vartype user_tenant_id: str :ivar user_name: :vartype user_name: str :ivar puid: :vartype puid: str :ivar iss: :vartype iss: str :ivar idp: :vartype idp: str :ivar altsec_id: :vartype altsec_id: str :ivar source_ip: :vartype source_ip: str :ivar skip_registry_private_link_check: :vartype skip_registry_private_link_check: bool """ _attribute_map = { 'user_object_id': {'key': 'userObjectId', 'type': 'str'}, 'user_tenant_id': {'key': 'userTenantId', 'type': 'str'}, 'user_name': {'key': 'userName', 'type': 'str'}, 'puid': {'key': 'puid', 'type': 'str'}, 'iss': {'key': 'iss', 'type': 'str'}, 'idp': {'key': 'idp', 'type': 'str'}, 'altsec_id': {'key': 'altsecId', 'type': 'str'}, 'source_ip': {'key': 'sourceIp', 'type': 'str'}, 'skip_registry_private_link_check': {'key': 'skipRegistryPrivateLinkCheck', 'type': 'bool'}, } def __init__( self, *, user_object_id: Optional[str] = None, user_tenant_id: Optional[str] = None, user_name: Optional[str] = None, puid: Optional[str] = None, iss: Optional[str] = None, idp: Optional[str] = None, altsec_id: Optional[str] = None, source_ip: Optional[str] = None, skip_registry_private_link_check: Optional[bool] = None, **kwargs ): """ :keyword 
user_object_id: :paramtype user_object_id: str :keyword user_tenant_id: :paramtype user_tenant_id: str :keyword user_name: :paramtype user_name: str :keyword puid: :paramtype puid: str :keyword iss: :paramtype iss: str :keyword idp: :paramtype idp: str :keyword altsec_id: :paramtype altsec_id: str :keyword source_ip: :paramtype source_ip: str :keyword skip_registry_private_link_check: :paramtype skip_registry_private_link_check: bool """ super(AetherCreatedBy, self).__init__(**kwargs) self.user_object_id = user_object_id self.user_tenant_id = user_tenant_id self.user_name = user_name self.puid = puid self.iss = iss self.idp = idp self.altsec_id = altsec_id self.source_ip = source_ip self.skip_registry_private_link_check = skip_registry_private_link_check class AetherCustomReference(msrest.serialization.Model): """AetherCustomReference. :ivar aml_data_store_name: :vartype aml_data_store_name: str :ivar relative_path: :vartype relative_path: str """ _attribute_map = { 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, } def __init__( self, *, aml_data_store_name: Optional[str] = None, relative_path: Optional[str] = None, **kwargs ): """ :keyword aml_data_store_name: :paramtype aml_data_store_name: str :keyword relative_path: :paramtype relative_path: str """ super(AetherCustomReference, self).__init__(**kwargs) self.aml_data_store_name = aml_data_store_name self.relative_path = relative_path class AetherDatabaseSink(msrest.serialization.Model): """AetherDatabaseSink. 
:ivar connection: :vartype connection: str :ivar table: :vartype table: str """ _attribute_map = { 'connection': {'key': 'connection', 'type': 'str'}, 'table': {'key': 'table', 'type': 'str'}, } def __init__( self, *, connection: Optional[str] = None, table: Optional[str] = None, **kwargs ): """ :keyword connection: :paramtype connection: str :keyword table: :paramtype table: str """ super(AetherDatabaseSink, self).__init__(**kwargs) self.connection = connection self.table = table class AetherDatabaseSource(msrest.serialization.Model): """AetherDatabaseSource. :ivar connection: :vartype connection: str :ivar query: :vartype query: str :ivar stored_procedure_name: :vartype stored_procedure_name: str :ivar stored_procedure_parameters: :vartype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter] """ _attribute_map = { 'connection': {'key': 'connection', 'type': 'str'}, 'query': {'key': 'query', 'type': 'str'}, 'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[AetherStoredProcedureParameter]'}, } def __init__( self, *, connection: Optional[str] = None, query: Optional[str] = None, stored_procedure_name: Optional[str] = None, stored_procedure_parameters: Optional[List["AetherStoredProcedureParameter"]] = None, **kwargs ): """ :keyword connection: :paramtype connection: str :keyword query: :paramtype query: str :keyword stored_procedure_name: :paramtype stored_procedure_name: str :keyword stored_procedure_parameters: :paramtype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter] """ super(AetherDatabaseSource, self).__init__(**kwargs) self.connection = connection self.query = query self.stored_procedure_name = stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters class AetherDatabricksComputeInfo(msrest.serialization.Model): """AetherDatabricksComputeInfo. 
:ivar existing_cluster_id: :vartype existing_cluster_id: str """ _attribute_map = { 'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'}, } def __init__( self, *, existing_cluster_id: Optional[str] = None, **kwargs ): """ :keyword existing_cluster_id: :paramtype existing_cluster_id: str """ super(AetherDatabricksComputeInfo, self).__init__(**kwargs) self.existing_cluster_id = existing_cluster_id class AetherDataLocation(msrest.serialization.Model): """AetherDataLocation. :ivar storage_type: Possible values include: "Cosmos", "AzureBlob", "Artifact", "Snapshot", "SavedAmlDataset", "Asset". :vartype storage_type: str or ~flow.models.AetherDataLocationStorageType :ivar storage_id: :vartype storage_id: str :ivar uri: :vartype uri: str :ivar data_store_name: :vartype data_store_name: str :ivar data_reference: :vartype data_reference: ~flow.models.AetherDataReference :ivar aml_dataset: :vartype aml_dataset: ~flow.models.AetherAmlDataset :ivar asset_definition: :vartype asset_definition: ~flow.models.AetherAssetDefinition :ivar is_compliant: :vartype is_compliant: bool :ivar reuse_calculation_fields: :vartype reuse_calculation_fields: ~flow.models.AetherDataLocationReuseCalculationFields """ _attribute_map = { 'storage_type': {'key': 'storageType', 'type': 'str'}, 'storage_id': {'key': 'storageId', 'type': 'str'}, 'uri': {'key': 'uri', 'type': 'str'}, 'data_store_name': {'key': 'dataStoreName', 'type': 'str'}, 'data_reference': {'key': 'dataReference', 'type': 'AetherDataReference'}, 'aml_dataset': {'key': 'amlDataset', 'type': 'AetherAmlDataset'}, 'asset_definition': {'key': 'assetDefinition', 'type': 'AetherAssetDefinition'}, 'is_compliant': {'key': 'isCompliant', 'type': 'bool'}, 'reuse_calculation_fields': {'key': 'reuseCalculationFields', 'type': 'AetherDataLocationReuseCalculationFields'}, } def __init__( self, *, storage_type: Optional[Union[str, "AetherDataLocationStorageType"]] = None, storage_id: Optional[str] = None, uri: Optional[str] = None, 
data_store_name: Optional[str] = None, data_reference: Optional["AetherDataReference"] = None, aml_dataset: Optional["AetherAmlDataset"] = None, asset_definition: Optional["AetherAssetDefinition"] = None, is_compliant: Optional[bool] = None, reuse_calculation_fields: Optional["AetherDataLocationReuseCalculationFields"] = None, **kwargs ): """ :keyword storage_type: Possible values include: "Cosmos", "AzureBlob", "Artifact", "Snapshot", "SavedAmlDataset", "Asset". :paramtype storage_type: str or ~flow.models.AetherDataLocationStorageType :keyword storage_id: :paramtype storage_id: str :keyword uri: :paramtype uri: str :keyword data_store_name: :paramtype data_store_name: str :keyword data_reference: :paramtype data_reference: ~flow.models.AetherDataReference :keyword aml_dataset: :paramtype aml_dataset: ~flow.models.AetherAmlDataset :keyword asset_definition: :paramtype asset_definition: ~flow.models.AetherAssetDefinition :keyword is_compliant: :paramtype is_compliant: bool :keyword reuse_calculation_fields: :paramtype reuse_calculation_fields: ~flow.models.AetherDataLocationReuseCalculationFields """ super(AetherDataLocation, self).__init__(**kwargs) self.storage_type = storage_type self.storage_id = storage_id self.uri = uri self.data_store_name = data_store_name self.data_reference = data_reference self.aml_dataset = aml_dataset self.asset_definition = asset_definition self.is_compliant = is_compliant self.reuse_calculation_fields = reuse_calculation_fields class AetherDataLocationReuseCalculationFields(msrest.serialization.Model): """AetherDataLocationReuseCalculationFields. 
:ivar data_store_name: :vartype data_store_name: str :ivar relative_path: :vartype relative_path: str :ivar data_experiment_id: :vartype data_experiment_id: str """ _attribute_map = { 'data_store_name': {'key': 'dataStoreName', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, 'data_experiment_id': {'key': 'dataExperimentId', 'type': 'str'}, } def __init__( self, *, data_store_name: Optional[str] = None, relative_path: Optional[str] = None, data_experiment_id: Optional[str] = None, **kwargs ): """ :keyword data_store_name: :paramtype data_store_name: str :keyword relative_path: :paramtype relative_path: str :keyword data_experiment_id: :paramtype data_experiment_id: str """ super(AetherDataLocationReuseCalculationFields, self).__init__(**kwargs) self.data_store_name = data_store_name self.relative_path = relative_path self.data_experiment_id = data_experiment_id class AetherDataPath(msrest.serialization.Model): """AetherDataPath. :ivar data_store_name: :vartype data_store_name: str :ivar relative_path: :vartype relative_path: str :ivar sql_data_path: :vartype sql_data_path: ~flow.models.AetherSqlDataPath """ _attribute_map = { 'data_store_name': {'key': 'dataStoreName', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, 'sql_data_path': {'key': 'sqlDataPath', 'type': 'AetherSqlDataPath'}, } def __init__( self, *, data_store_name: Optional[str] = None, relative_path: Optional[str] = None, sql_data_path: Optional["AetherSqlDataPath"] = None, **kwargs ): """ :keyword data_store_name: :paramtype data_store_name: str :keyword relative_path: :paramtype relative_path: str :keyword sql_data_path: :paramtype sql_data_path: ~flow.models.AetherSqlDataPath """ super(AetherDataPath, self).__init__(**kwargs) self.data_store_name = data_store_name self.relative_path = relative_path self.sql_data_path = sql_data_path class AetherDataReference(msrest.serialization.Model): """AetherDataReference. 
:ivar type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles", "Cosmos", "PhillyHdfs", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase", "Custom", "Hdfs". :vartype type: str or ~flow.models.AetherDataReferenceType :ivar azure_blob_reference: :vartype azure_blob_reference: ~flow.models.AetherAzureBlobReference :ivar azure_data_lake_reference: :vartype azure_data_lake_reference: ~flow.models.AetherAzureDataLakeReference :ivar azure_files_reference: :vartype azure_files_reference: ~flow.models.AetherAzureFilesReference :ivar cosmos_reference: :vartype cosmos_reference: ~flow.models.AetherCosmosReference :ivar philly_hdfs_reference: :vartype philly_hdfs_reference: ~flow.models.AetherPhillyHdfsReference :ivar azure_sql_database_reference: :vartype azure_sql_database_reference: ~flow.models.AetherAzureDatabaseReference :ivar azure_postgres_database_reference: :vartype azure_postgres_database_reference: ~flow.models.AetherAzureDatabaseReference :ivar azure_data_lake_gen2_reference: :vartype azure_data_lake_gen2_reference: ~flow.models.AetherAzureDataLakeGen2Reference :ivar dbfs_reference: :vartype dbfs_reference: ~flow.models.AetherDBFSReference :ivar azure_my_sql_database_reference: :vartype azure_my_sql_database_reference: ~flow.models.AetherAzureDatabaseReference :ivar custom_reference: :vartype custom_reference: ~flow.models.AetherCustomReference :ivar hdfs_reference: :vartype hdfs_reference: ~flow.models.AetherHdfsReference """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'azure_blob_reference': {'key': 'azureBlobReference', 'type': 'AetherAzureBlobReference'}, 'azure_data_lake_reference': {'key': 'azureDataLakeReference', 'type': 'AetherAzureDataLakeReference'}, 'azure_files_reference': {'key': 'azureFilesReference', 'type': 'AetherAzureFilesReference'}, 'cosmos_reference': {'key': 'cosmosReference', 'type': 'AetherCosmosReference'}, 'philly_hdfs_reference': {'key': 
'phillyHdfsReference', 'type': 'AetherPhillyHdfsReference'}, 'azure_sql_database_reference': {'key': 'azureSqlDatabaseReference', 'type': 'AetherAzureDatabaseReference'}, 'azure_postgres_database_reference': {'key': 'azurePostgresDatabaseReference', 'type': 'AetherAzureDatabaseReference'}, 'azure_data_lake_gen2_reference': {'key': 'azureDataLakeGen2Reference', 'type': 'AetherAzureDataLakeGen2Reference'}, 'dbfs_reference': {'key': 'dbfsReference', 'type': 'AetherDBFSReference'}, 'azure_my_sql_database_reference': {'key': 'azureMySqlDatabaseReference', 'type': 'AetherAzureDatabaseReference'}, 'custom_reference': {'key': 'customReference', 'type': 'AetherCustomReference'}, 'hdfs_reference': {'key': 'hdfsReference', 'type': 'AetherHdfsReference'}, } def __init__( self, *, type: Optional[Union[str, "AetherDataReferenceType"]] = None, azure_blob_reference: Optional["AetherAzureBlobReference"] = None, azure_data_lake_reference: Optional["AetherAzureDataLakeReference"] = None, azure_files_reference: Optional["AetherAzureFilesReference"] = None, cosmos_reference: Optional["AetherCosmosReference"] = None, philly_hdfs_reference: Optional["AetherPhillyHdfsReference"] = None, azure_sql_database_reference: Optional["AetherAzureDatabaseReference"] = None, azure_postgres_database_reference: Optional["AetherAzureDatabaseReference"] = None, azure_data_lake_gen2_reference: Optional["AetherAzureDataLakeGen2Reference"] = None, dbfs_reference: Optional["AetherDBFSReference"] = None, azure_my_sql_database_reference: Optional["AetherAzureDatabaseReference"] = None, custom_reference: Optional["AetherCustomReference"] = None, hdfs_reference: Optional["AetherHdfsReference"] = None, **kwargs ): """ :keyword type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles", "Cosmos", "PhillyHdfs", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase", "Custom", "Hdfs". 
:paramtype type: str or ~flow.models.AetherDataReferenceType :keyword azure_blob_reference: :paramtype azure_blob_reference: ~flow.models.AetherAzureBlobReference :keyword azure_data_lake_reference: :paramtype azure_data_lake_reference: ~flow.models.AetherAzureDataLakeReference :keyword azure_files_reference: :paramtype azure_files_reference: ~flow.models.AetherAzureFilesReference :keyword cosmos_reference: :paramtype cosmos_reference: ~flow.models.AetherCosmosReference :keyword philly_hdfs_reference: :paramtype philly_hdfs_reference: ~flow.models.AetherPhillyHdfsReference :keyword azure_sql_database_reference: :paramtype azure_sql_database_reference: ~flow.models.AetherAzureDatabaseReference :keyword azure_postgres_database_reference: :paramtype azure_postgres_database_reference: ~flow.models.AetherAzureDatabaseReference :keyword azure_data_lake_gen2_reference: :paramtype azure_data_lake_gen2_reference: ~flow.models.AetherAzureDataLakeGen2Reference :keyword dbfs_reference: :paramtype dbfs_reference: ~flow.models.AetherDBFSReference :keyword azure_my_sql_database_reference: :paramtype azure_my_sql_database_reference: ~flow.models.AetherAzureDatabaseReference :keyword custom_reference: :paramtype custom_reference: ~flow.models.AetherCustomReference :keyword hdfs_reference: :paramtype hdfs_reference: ~flow.models.AetherHdfsReference """ super(AetherDataReference, self).__init__(**kwargs) self.type = type self.azure_blob_reference = azure_blob_reference self.azure_data_lake_reference = azure_data_lake_reference self.azure_files_reference = azure_files_reference self.cosmos_reference = cosmos_reference self.philly_hdfs_reference = philly_hdfs_reference self.azure_sql_database_reference = azure_sql_database_reference self.azure_postgres_database_reference = azure_postgres_database_reference self.azure_data_lake_gen2_reference = azure_data_lake_gen2_reference self.dbfs_reference = dbfs_reference self.azure_my_sql_database_reference = azure_my_sql_database_reference 
self.custom_reference = custom_reference self.hdfs_reference = hdfs_reference class AetherDataSetDefinition(msrest.serialization.Model): """AetherDataSetDefinition. :ivar data_type_short_name: :vartype data_type_short_name: str :ivar parameter_name: :vartype parameter_name: str :ivar value: :vartype value: ~flow.models.AetherDataSetDefinitionValue """ _attribute_map = { 'data_type_short_name': {'key': 'dataTypeShortName', 'type': 'str'}, 'parameter_name': {'key': 'parameterName', 'type': 'str'}, 'value': {'key': 'value', 'type': 'AetherDataSetDefinitionValue'}, } def __init__( self, *, data_type_short_name: Optional[str] = None, parameter_name: Optional[str] = None, value: Optional["AetherDataSetDefinitionValue"] = None, **kwargs ): """ :keyword data_type_short_name: :paramtype data_type_short_name: str :keyword parameter_name: :paramtype parameter_name: str :keyword value: :paramtype value: ~flow.models.AetherDataSetDefinitionValue """ super(AetherDataSetDefinition, self).__init__(**kwargs) self.data_type_short_name = data_type_short_name self.parameter_name = parameter_name self.value = value class AetherDataSetDefinitionValue(msrest.serialization.Model): """AetherDataSetDefinitionValue. 
:ivar literal_value: :vartype literal_value: ~flow.models.AetherDataPath :ivar data_set_reference: :vartype data_set_reference: ~flow.models.AetherRegisteredDataSetReference :ivar saved_data_set_reference: :vartype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference :ivar asset_definition: :vartype asset_definition: ~flow.models.AetherAssetDefinition """ _attribute_map = { 'literal_value': {'key': 'literalValue', 'type': 'AetherDataPath'}, 'data_set_reference': {'key': 'dataSetReference', 'type': 'AetherRegisteredDataSetReference'}, 'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'AetherSavedDataSetReference'}, 'asset_definition': {'key': 'assetDefinition', 'type': 'AetherAssetDefinition'}, } def __init__( self, *, literal_value: Optional["AetherDataPath"] = None, data_set_reference: Optional["AetherRegisteredDataSetReference"] = None, saved_data_set_reference: Optional["AetherSavedDataSetReference"] = None, asset_definition: Optional["AetherAssetDefinition"] = None, **kwargs ): """ :keyword literal_value: :paramtype literal_value: ~flow.models.AetherDataPath :keyword data_set_reference: :paramtype data_set_reference: ~flow.models.AetherRegisteredDataSetReference :keyword saved_data_set_reference: :paramtype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference :keyword asset_definition: :paramtype asset_definition: ~flow.models.AetherAssetDefinition """ super(AetherDataSetDefinitionValue, self).__init__(**kwargs) self.literal_value = literal_value self.data_set_reference = data_set_reference self.saved_data_set_reference = saved_data_set_reference self.asset_definition = asset_definition class AetherDatasetOutput(msrest.serialization.Model): """AetherDatasetOutput. :ivar dataset_type: Possible values include: "File", "Tabular". 
:vartype dataset_type: str or ~flow.models.AetherDatasetType :ivar dataset_registration: :vartype dataset_registration: ~flow.models.AetherDatasetRegistration :ivar dataset_output_options: :vartype dataset_output_options: ~flow.models.AetherDatasetOutputOptions """ _attribute_map = { 'dataset_type': {'key': 'datasetType', 'type': 'str'}, 'dataset_registration': {'key': 'datasetRegistration', 'type': 'AetherDatasetRegistration'}, 'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'AetherDatasetOutputOptions'}, } def __init__( self, *, dataset_type: Optional[Union[str, "AetherDatasetType"]] = None, dataset_registration: Optional["AetherDatasetRegistration"] = None, dataset_output_options: Optional["AetherDatasetOutputOptions"] = None, **kwargs ): """ :keyword dataset_type: Possible values include: "File", "Tabular". :paramtype dataset_type: str or ~flow.models.AetherDatasetType :keyword dataset_registration: :paramtype dataset_registration: ~flow.models.AetherDatasetRegistration :keyword dataset_output_options: :paramtype dataset_output_options: ~flow.models.AetherDatasetOutputOptions """ super(AetherDatasetOutput, self).__init__(**kwargs) self.dataset_type = dataset_type self.dataset_registration = dataset_registration self.dataset_output_options = dataset_output_options class AetherDatasetOutputOptions(msrest.serialization.Model): """AetherDatasetOutputOptions. 
:ivar source_globs: :vartype source_globs: ~flow.models.AetherGlobsOptions :ivar path_on_datastore: :vartype path_on_datastore: str :ivar path_on_datastore_parameter_assignment: :vartype path_on_datastore_parameter_assignment: ~flow.models.AetherParameterAssignment """ _attribute_map = { 'source_globs': {'key': 'sourceGlobs', 'type': 'AetherGlobsOptions'}, 'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'}, 'path_on_datastore_parameter_assignment': {'key': 'PathOnDatastoreParameterAssignment', 'type': 'AetherParameterAssignment'}, } def __init__( self, *, source_globs: Optional["AetherGlobsOptions"] = None, path_on_datastore: Optional[str] = None, path_on_datastore_parameter_assignment: Optional["AetherParameterAssignment"] = None, **kwargs ): """ :keyword source_globs: :paramtype source_globs: ~flow.models.AetherGlobsOptions :keyword path_on_datastore: :paramtype path_on_datastore: str :keyword path_on_datastore_parameter_assignment: :paramtype path_on_datastore_parameter_assignment: ~flow.models.AetherParameterAssignment """ super(AetherDatasetOutputOptions, self).__init__(**kwargs) self.source_globs = source_globs self.path_on_datastore = path_on_datastore self.path_on_datastore_parameter_assignment = path_on_datastore_parameter_assignment class AetherDatasetRegistration(msrest.serialization.Model): """AetherDatasetRegistration. :ivar name: :vartype name: str :ivar create_new_version: :vartype create_new_version: bool :ivar description: :vartype description: str :ivar tags: A set of tags. Dictionary of :code:`<string>`. 
:vartype tags: dict[str, str] :ivar additional_transformations: :vartype additional_transformations: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'create_new_version': {'key': 'createNewVersion', 'type': 'bool'}, 'description': {'key': 'description', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, create_new_version: Optional[bool] = None, description: Optional[str] = None, tags: Optional[Dict[str, str]] = None, additional_transformations: Optional[str] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword create_new_version: :paramtype create_new_version: bool :keyword description: :paramtype description: str :keyword tags: A set of tags. Dictionary of :code:`<string>`. :paramtype tags: dict[str, str] :keyword additional_transformations: :paramtype additional_transformations: str """ super(AetherDatasetRegistration, self).__init__(**kwargs) self.name = name self.create_new_version = create_new_version self.description = description self.tags = tags self.additional_transformations = additional_transformations class AetherDataSettings(msrest.serialization.Model): """AetherDataSettings. 
:ivar target_column_name: :vartype target_column_name: str :ivar weight_column_name: :vartype weight_column_name: str :ivar positive_label: :vartype positive_label: str :ivar validation_data: :vartype validation_data: ~flow.models.AetherValidationDataSettings :ivar test_data: :vartype test_data: ~flow.models.AetherTestDataSettings """ _attribute_map = { 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, 'positive_label': {'key': 'positiveLabel', 'type': 'str'}, 'validation_data': {'key': 'validationData', 'type': 'AetherValidationDataSettings'}, 'test_data': {'key': 'testData', 'type': 'AetherTestDataSettings'}, } def __init__( self, *, target_column_name: Optional[str] = None, weight_column_name: Optional[str] = None, positive_label: Optional[str] = None, validation_data: Optional["AetherValidationDataSettings"] = None, test_data: Optional["AetherTestDataSettings"] = None, **kwargs ): """ :keyword target_column_name: :paramtype target_column_name: str :keyword weight_column_name: :paramtype weight_column_name: str :keyword positive_label: :paramtype positive_label: str :keyword validation_data: :paramtype validation_data: ~flow.models.AetherValidationDataSettings :keyword test_data: :paramtype test_data: ~flow.models.AetherTestDataSettings """ super(AetherDataSettings, self).__init__(**kwargs) self.target_column_name = target_column_name self.weight_column_name = weight_column_name self.positive_label = positive_label self.validation_data = validation_data self.test_data = test_data class AetherDatastoreSetting(msrest.serialization.Model): """AetherDatastoreSetting. 
:ivar data_store_name: :vartype data_store_name: str """ _attribute_map = { 'data_store_name': {'key': 'dataStoreName', 'type': 'str'}, } def __init__( self, *, data_store_name: Optional[str] = None, **kwargs ): """ :keyword data_store_name: :paramtype data_store_name: str """ super(AetherDatastoreSetting, self).__init__(**kwargs) self.data_store_name = data_store_name class AetherDataTransferCloudConfiguration(msrest.serialization.Model): """AetherDataTransferCloudConfiguration. :ivar allow_overwrite: :vartype allow_overwrite: bool """ _attribute_map = { 'allow_overwrite': {'key': 'AllowOverwrite', 'type': 'bool'}, } def __init__( self, *, allow_overwrite: Optional[bool] = None, **kwargs ): """ :keyword allow_overwrite: :paramtype allow_overwrite: bool """ super(AetherDataTransferCloudConfiguration, self).__init__(**kwargs) self.allow_overwrite = allow_overwrite class AetherDataTransferSink(msrest.serialization.Model): """AetherDataTransferSink. :ivar type: Possible values include: "DataBase", "FileSystem". :vartype type: str or ~flow.models.AetherDataTransferStorageType :ivar file_system: :vartype file_system: ~flow.models.AetherFileSystem :ivar database_sink: :vartype database_sink: ~flow.models.AetherDatabaseSink """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'file_system': {'key': 'fileSystem', 'type': 'AetherFileSystem'}, 'database_sink': {'key': 'databaseSink', 'type': 'AetherDatabaseSink'}, } def __init__( self, *, type: Optional[Union[str, "AetherDataTransferStorageType"]] = None, file_system: Optional["AetherFileSystem"] = None, database_sink: Optional["AetherDatabaseSink"] = None, **kwargs ): """ :keyword type: Possible values include: "DataBase", "FileSystem". 
:paramtype type: str or ~flow.models.AetherDataTransferStorageType :keyword file_system: :paramtype file_system: ~flow.models.AetherFileSystem :keyword database_sink: :paramtype database_sink: ~flow.models.AetherDatabaseSink """ super(AetherDataTransferSink, self).__init__(**kwargs) self.type = type self.file_system = file_system self.database_sink = database_sink class AetherDataTransferSource(msrest.serialization.Model): """AetherDataTransferSource. :ivar type: Possible values include: "DataBase", "FileSystem". :vartype type: str or ~flow.models.AetherDataTransferStorageType :ivar file_system: :vartype file_system: ~flow.models.AetherFileSystem :ivar database_source: :vartype database_source: ~flow.models.AetherDatabaseSource """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'file_system': {'key': 'fileSystem', 'type': 'AetherFileSystem'}, 'database_source': {'key': 'databaseSource', 'type': 'AetherDatabaseSource'}, } def __init__( self, *, type: Optional[Union[str, "AetherDataTransferStorageType"]] = None, file_system: Optional["AetherFileSystem"] = None, database_source: Optional["AetherDatabaseSource"] = None, **kwargs ): """ :keyword type: Possible values include: "DataBase", "FileSystem". :paramtype type: str or ~flow.models.AetherDataTransferStorageType :keyword file_system: :paramtype file_system: ~flow.models.AetherFileSystem :keyword database_source: :paramtype database_source: ~flow.models.AetherDatabaseSource """ super(AetherDataTransferSource, self).__init__(**kwargs) self.type = type self.file_system = file_system self.database_source = database_source class AetherDataTransferV2CloudSetting(msrest.serialization.Model): """AetherDataTransferV2CloudSetting. :ivar task_type: Possible values include: "ImportData", "ExportData", "CopyData". 
:vartype task_type: str or ~flow.models.AetherDataTransferTaskType :ivar compute_name: :vartype compute_name: str :ivar copy_data_task: :vartype copy_data_task: ~flow.models.AetherCopyDataTask :ivar import_data_task: :vartype import_data_task: ~flow.models.AetherImportDataTask :ivar export_data_task: :vartype export_data_task: ~flow.models.AetherExportDataTask :ivar data_transfer_sources: This is a dictionary. :vartype data_transfer_sources: dict[str, ~flow.models.AetherDataTransferSource] :ivar data_transfer_sinks: This is a dictionary. :vartype data_transfer_sinks: dict[str, ~flow.models.AetherDataTransferSink] :ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict". :vartype data_copy_mode: str or ~flow.models.AetherDataCopyMode """ _attribute_map = { 'task_type': {'key': 'taskType', 'type': 'str'}, 'compute_name': {'key': 'ComputeName', 'type': 'str'}, 'copy_data_task': {'key': 'CopyDataTask', 'type': 'AetherCopyDataTask'}, 'import_data_task': {'key': 'ImportDataTask', 'type': 'AetherImportDataTask'}, 'export_data_task': {'key': 'ExportDataTask', 'type': 'AetherExportDataTask'}, 'data_transfer_sources': {'key': 'DataTransferSources', 'type': '{AetherDataTransferSource}'}, 'data_transfer_sinks': {'key': 'DataTransferSinks', 'type': '{AetherDataTransferSink}'}, 'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'}, } def __init__( self, *, task_type: Optional[Union[str, "AetherDataTransferTaskType"]] = None, compute_name: Optional[str] = None, copy_data_task: Optional["AetherCopyDataTask"] = None, import_data_task: Optional["AetherImportDataTask"] = None, export_data_task: Optional["AetherExportDataTask"] = None, data_transfer_sources: Optional[Dict[str, "AetherDataTransferSource"]] = None, data_transfer_sinks: Optional[Dict[str, "AetherDataTransferSink"]] = None, data_copy_mode: Optional[Union[str, "AetherDataCopyMode"]] = None, **kwargs ): """ :keyword task_type: Possible values include: "ImportData", "ExportData", "CopyData". 
:paramtype task_type: str or ~flow.models.AetherDataTransferTaskType :keyword compute_name: :paramtype compute_name: str :keyword copy_data_task: :paramtype copy_data_task: ~flow.models.AetherCopyDataTask :keyword import_data_task: :paramtype import_data_task: ~flow.models.AetherImportDataTask :keyword export_data_task: :paramtype export_data_task: ~flow.models.AetherExportDataTask :keyword data_transfer_sources: This is a dictionary. :paramtype data_transfer_sources: dict[str, ~flow.models.AetherDataTransferSource] :keyword data_transfer_sinks: This is a dictionary. :paramtype data_transfer_sinks: dict[str, ~flow.models.AetherDataTransferSink] :keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict". :paramtype data_copy_mode: str or ~flow.models.AetherDataCopyMode """ super(AetherDataTransferV2CloudSetting, self).__init__(**kwargs) self.task_type = task_type self.compute_name = compute_name self.copy_data_task = copy_data_task self.import_data_task = import_data_task self.export_data_task = export_data_task self.data_transfer_sources = data_transfer_sources self.data_transfer_sinks = data_transfer_sinks self.data_copy_mode = data_copy_mode class AetherDBFSReference(msrest.serialization.Model): """AetherDBFSReference. 
:ivar relative_path: :vartype relative_path: str :ivar aml_data_store_name: :vartype aml_data_store_name: str """ _attribute_map = { 'relative_path': {'key': 'relativePath', 'type': 'str'}, 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, } def __init__( self, *, relative_path: Optional[str] = None, aml_data_store_name: Optional[str] = None, **kwargs ): """ :keyword relative_path: :paramtype relative_path: str :keyword aml_data_store_name: :paramtype aml_data_store_name: str """ super(AetherDBFSReference, self).__init__(**kwargs) self.relative_path = relative_path self.aml_data_store_name = aml_data_store_name class AetherDockerSettingConfiguration(msrest.serialization.Model): """AetherDockerSettingConfiguration. :ivar use_docker: :vartype use_docker: bool :ivar shared_volumes: :vartype shared_volumes: bool :ivar shm_size: :vartype shm_size: str :ivar arguments: :vartype arguments: list[str] """ _attribute_map = { 'use_docker': {'key': 'useDocker', 'type': 'bool'}, 'shared_volumes': {'key': 'sharedVolumes', 'type': 'bool'}, 'shm_size': {'key': 'shmSize', 'type': 'str'}, 'arguments': {'key': 'arguments', 'type': '[str]'}, } def __init__( self, *, use_docker: Optional[bool] = None, shared_volumes: Optional[bool] = None, shm_size: Optional[str] = None, arguments: Optional[List[str]] = None, **kwargs ): """ :keyword use_docker: :paramtype use_docker: bool :keyword shared_volumes: :paramtype shared_volumes: bool :keyword shm_size: :paramtype shm_size: str :keyword arguments: :paramtype arguments: list[str] """ super(AetherDockerSettingConfiguration, self).__init__(**kwargs) self.use_docker = use_docker self.shared_volumes = shared_volumes self.shm_size = shm_size self.arguments = arguments class AetherDoWhileControlFlowInfo(msrest.serialization.Model): """AetherDoWhileControlFlowInfo. 
:ivar output_port_name_to_input_port_names_mapping: Dictionary of <componentsΒ·1f2aigmΒ·schemasΒ·aetherdowhilecontrolflowinfoΒ·propertiesΒ·outputportnametoinputportnamesmappingΒ·additionalproperties>. :vartype output_port_name_to_input_port_names_mapping: dict[str, list[str]] :ivar condition_output_port_name: :vartype condition_output_port_name: str :ivar run_settings: :vartype run_settings: ~flow.models.AetherDoWhileControlFlowRunSettings """ _attribute_map = { 'output_port_name_to_input_port_names_mapping': {'key': 'outputPortNameToInputPortNamesMapping', 'type': '{[str]}'}, 'condition_output_port_name': {'key': 'conditionOutputPortName', 'type': 'str'}, 'run_settings': {'key': 'runSettings', 'type': 'AetherDoWhileControlFlowRunSettings'}, } def __init__( self, *, output_port_name_to_input_port_names_mapping: Optional[Dict[str, List[str]]] = None, condition_output_port_name: Optional[str] = None, run_settings: Optional["AetherDoWhileControlFlowRunSettings"] = None, **kwargs ): """ :keyword output_port_name_to_input_port_names_mapping: Dictionary of <componentsΒ·1f2aigmΒ·schemasΒ·aetherdowhilecontrolflowinfoΒ·propertiesΒ·outputportnametoinputportnamesmappingΒ·additionalproperties>. :paramtype output_port_name_to_input_port_names_mapping: dict[str, list[str]] :keyword condition_output_port_name: :paramtype condition_output_port_name: str :keyword run_settings: :paramtype run_settings: ~flow.models.AetherDoWhileControlFlowRunSettings """ super(AetherDoWhileControlFlowInfo, self).__init__(**kwargs) self.output_port_name_to_input_port_names_mapping = output_port_name_to_input_port_names_mapping self.condition_output_port_name = condition_output_port_name self.run_settings = run_settings class AetherDoWhileControlFlowRunSettings(msrest.serialization.Model): """AetherDoWhileControlFlowRunSettings. 
:ivar max_loop_iteration_count: :vartype max_loop_iteration_count: ~flow.models.AetherParameterAssignment """ _attribute_map = { 'max_loop_iteration_count': {'key': 'maxLoopIterationCount', 'type': 'AetherParameterAssignment'}, } def __init__( self, *, max_loop_iteration_count: Optional["AetherParameterAssignment"] = None, **kwargs ): """ :keyword max_loop_iteration_count: :paramtype max_loop_iteration_count: ~flow.models.AetherParameterAssignment """ super(AetherDoWhileControlFlowRunSettings, self).__init__(**kwargs) self.max_loop_iteration_count = max_loop_iteration_count class AetherEntityInterfaceDocumentation(msrest.serialization.Model): """AetherEntityInterfaceDocumentation. :ivar inputs_documentation: Dictionary of :code:`<string>`. :vartype inputs_documentation: dict[str, str] :ivar outputs_documentation: Dictionary of :code:`<string>`. :vartype outputs_documentation: dict[str, str] :ivar parameters_documentation: Dictionary of :code:`<string>`. :vartype parameters_documentation: dict[str, str] """ _attribute_map = { 'inputs_documentation': {'key': 'inputsDocumentation', 'type': '{str}'}, 'outputs_documentation': {'key': 'outputsDocumentation', 'type': '{str}'}, 'parameters_documentation': {'key': 'parametersDocumentation', 'type': '{str}'}, } def __init__( self, *, inputs_documentation: Optional[Dict[str, str]] = None, outputs_documentation: Optional[Dict[str, str]] = None, parameters_documentation: Optional[Dict[str, str]] = None, **kwargs ): """ :keyword inputs_documentation: Dictionary of :code:`<string>`. :paramtype inputs_documentation: dict[str, str] :keyword outputs_documentation: Dictionary of :code:`<string>`. :paramtype outputs_documentation: dict[str, str] :keyword parameters_documentation: Dictionary of :code:`<string>`. 
:paramtype parameters_documentation: dict[str, str] """ super(AetherEntityInterfaceDocumentation, self).__init__(**kwargs) self.inputs_documentation = inputs_documentation self.outputs_documentation = outputs_documentation self.parameters_documentation = parameters_documentation class AetherEntrySetting(msrest.serialization.Model): """AetherEntrySetting. :ivar file: :vartype file: str :ivar class_name: :vartype class_name: str """ _attribute_map = { 'file': {'key': 'file', 'type': 'str'}, 'class_name': {'key': 'className', 'type': 'str'}, } def __init__( self, *, file: Optional[str] = None, class_name: Optional[str] = None, **kwargs ): """ :keyword file: :paramtype file: str :keyword class_name: :paramtype class_name: str """ super(AetherEntrySetting, self).__init__(**kwargs) self.file = file self.class_name = class_name class AetherEnvironmentConfiguration(msrest.serialization.Model): """AetherEnvironmentConfiguration. :ivar name: :vartype name: str :ivar version: :vartype version: str :ivar use_environment_definition: :vartype use_environment_definition: bool :ivar environment_definition_string: :vartype environment_definition_string: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, 'use_environment_definition': {'key': 'useEnvironmentDefinition', 'type': 'bool'}, 'environment_definition_string': {'key': 'environmentDefinitionString', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, version: Optional[str] = None, use_environment_definition: Optional[bool] = None, environment_definition_string: Optional[str] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword version: :paramtype version: str :keyword use_environment_definition: :paramtype use_environment_definition: bool :keyword environment_definition_string: :paramtype environment_definition_string: str """ super(AetherEnvironmentConfiguration, self).__init__(**kwargs) self.name = name self.version = version 
self.use_environment_definition = use_environment_definition self.environment_definition_string = environment_definition_string class AetherEsCloudConfiguration(msrest.serialization.Model): """AetherEsCloudConfiguration. :ivar enable_output_to_file_based_on_data_type_id: :vartype enable_output_to_file_based_on_data_type_id: bool :ivar aml_compute_priority_internal: :vartype aml_compute_priority_internal: ~flow.models.AetherPriorityConfiguration :ivar itp_priority_internal: :vartype itp_priority_internal: ~flow.models.AetherPriorityConfiguration :ivar singularity_priority_internal: :vartype singularity_priority_internal: ~flow.models.AetherPriorityConfiguration :ivar environment: :vartype environment: ~flow.models.AetherEnvironmentConfiguration :ivar hyper_drive_configuration: :vartype hyper_drive_configuration: ~flow.models.AetherHyperDriveConfiguration :ivar k8_s_config: :vartype k8_s_config: ~flow.models.AetherK8SConfiguration :ivar resource_config: :vartype resource_config: ~flow.models.AetherResourceConfiguration :ivar torch_distributed_config: :vartype torch_distributed_config: ~flow.models.AetherTorchDistributedConfiguration :ivar target_selector_config: :vartype target_selector_config: ~flow.models.AetherTargetSelectorConfiguration :ivar docker_config: :vartype docker_config: ~flow.models.AetherDockerSettingConfiguration :ivar environment_variables: Dictionary of :code:`<string>`. :vartype environment_variables: dict[str, str] :ivar max_run_duration_seconds: :vartype max_run_duration_seconds: int :ivar identity: :vartype identity: ~flow.models.AetherIdentitySetting :ivar application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`. 
:vartype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration] :ivar run_config: :vartype run_config: str """ _attribute_map = { 'enable_output_to_file_based_on_data_type_id': {'key': 'enableOutputToFileBasedOnDataTypeId', 'type': 'bool'}, 'aml_compute_priority_internal': {'key': 'amlComputePriorityInternal', 'type': 'AetherPriorityConfiguration'}, 'itp_priority_internal': {'key': 'itpPriorityInternal', 'type': 'AetherPriorityConfiguration'}, 'singularity_priority_internal': {'key': 'singularityPriorityInternal', 'type': 'AetherPriorityConfiguration'}, 'environment': {'key': 'environment', 'type': 'AetherEnvironmentConfiguration'}, 'hyper_drive_configuration': {'key': 'hyperDriveConfiguration', 'type': 'AetherHyperDriveConfiguration'}, 'k8_s_config': {'key': 'k8sConfig', 'type': 'AetherK8SConfiguration'}, 'resource_config': {'key': 'resourceConfig', 'type': 'AetherResourceConfiguration'}, 'torch_distributed_config': {'key': 'torchDistributedConfig', 'type': 'AetherTorchDistributedConfiguration'}, 'target_selector_config': {'key': 'targetSelectorConfig', 'type': 'AetherTargetSelectorConfiguration'}, 'docker_config': {'key': 'dockerConfig', 'type': 'AetherDockerSettingConfiguration'}, 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, 'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'int'}, 'identity': {'key': 'identity', 'type': 'AetherIdentitySetting'}, 'application_endpoints': {'key': 'applicationEndpoints', 'type': '{ApplicationEndpointConfiguration}'}, 'run_config': {'key': 'runConfig', 'type': 'str'}, } def __init__( self, *, enable_output_to_file_based_on_data_type_id: Optional[bool] = None, aml_compute_priority_internal: Optional["AetherPriorityConfiguration"] = None, itp_priority_internal: Optional["AetherPriorityConfiguration"] = None, singularity_priority_internal: Optional["AetherPriorityConfiguration"] = None, environment: Optional["AetherEnvironmentConfiguration"] = None, 
hyper_drive_configuration: Optional["AetherHyperDriveConfiguration"] = None, k8_s_config: Optional["AetherK8SConfiguration"] = None, resource_config: Optional["AetherResourceConfiguration"] = None, torch_distributed_config: Optional["AetherTorchDistributedConfiguration"] = None, target_selector_config: Optional["AetherTargetSelectorConfiguration"] = None, docker_config: Optional["AetherDockerSettingConfiguration"] = None, environment_variables: Optional[Dict[str, str]] = None, max_run_duration_seconds: Optional[int] = None, identity: Optional["AetherIdentitySetting"] = None, application_endpoints: Optional[Dict[str, "ApplicationEndpointConfiguration"]] = None, run_config: Optional[str] = None, **kwargs ): """ :keyword enable_output_to_file_based_on_data_type_id: :paramtype enable_output_to_file_based_on_data_type_id: bool :keyword aml_compute_priority_internal: :paramtype aml_compute_priority_internal: ~flow.models.AetherPriorityConfiguration :keyword itp_priority_internal: :paramtype itp_priority_internal: ~flow.models.AetherPriorityConfiguration :keyword singularity_priority_internal: :paramtype singularity_priority_internal: ~flow.models.AetherPriorityConfiguration :keyword environment: :paramtype environment: ~flow.models.AetherEnvironmentConfiguration :keyword hyper_drive_configuration: :paramtype hyper_drive_configuration: ~flow.models.AetherHyperDriveConfiguration :keyword k8_s_config: :paramtype k8_s_config: ~flow.models.AetherK8SConfiguration :keyword resource_config: :paramtype resource_config: ~flow.models.AetherResourceConfiguration :keyword torch_distributed_config: :paramtype torch_distributed_config: ~flow.models.AetherTorchDistributedConfiguration :keyword target_selector_config: :paramtype target_selector_config: ~flow.models.AetherTargetSelectorConfiguration :keyword docker_config: :paramtype docker_config: ~flow.models.AetherDockerSettingConfiguration :keyword environment_variables: Dictionary of :code:`<string>`. 
:paramtype environment_variables: dict[str, str] :keyword max_run_duration_seconds: :paramtype max_run_duration_seconds: int :keyword identity: :paramtype identity: ~flow.models.AetherIdentitySetting :keyword application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`. :paramtype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration] :keyword run_config: :paramtype run_config: str """ super(AetherEsCloudConfiguration, self).__init__(**kwargs) self.enable_output_to_file_based_on_data_type_id = enable_output_to_file_based_on_data_type_id self.aml_compute_priority_internal = aml_compute_priority_internal self.itp_priority_internal = itp_priority_internal self.singularity_priority_internal = singularity_priority_internal self.environment = environment self.hyper_drive_configuration = hyper_drive_configuration self.k8_s_config = k8_s_config self.resource_config = resource_config self.torch_distributed_config = torch_distributed_config self.target_selector_config = target_selector_config self.docker_config = docker_config self.environment_variables = environment_variables self.max_run_duration_seconds = max_run_duration_seconds self.identity = identity self.application_endpoints = application_endpoints self.run_config = run_config class AetherExportDataTask(msrest.serialization.Model): """AetherExportDataTask. :ivar data_transfer_sink: :vartype data_transfer_sink: ~flow.models.AetherDataTransferSink """ _attribute_map = { 'data_transfer_sink': {'key': 'DataTransferSink', 'type': 'AetherDataTransferSink'}, } def __init__( self, *, data_transfer_sink: Optional["AetherDataTransferSink"] = None, **kwargs ): """ :keyword data_transfer_sink: :paramtype data_transfer_sink: ~flow.models.AetherDataTransferSink """ super(AetherExportDataTask, self).__init__(**kwargs) self.data_transfer_sink = data_transfer_sink class AetherFeaturizationSettings(msrest.serialization.Model): """AetherFeaturizationSettings. 
:ivar mode: Possible values include: "Auto", "Custom", "Off". :vartype mode: str or ~flow.models.AetherFeaturizationMode :ivar blocked_transformers: :vartype blocked_transformers: list[str] :ivar column_purposes: Dictionary of :code:`<string>`. :vartype column_purposes: dict[str, str] :ivar drop_columns: :vartype drop_columns: list[str] :ivar transformer_params: Dictionary of <componentsΒ·1y90i4mΒ·schemasΒ·aetherfeaturizationsettingsΒ·propertiesΒ·transformerparamsΒ·additionalproperties>. :vartype transformer_params: dict[str, list[~flow.models.AetherColumnTransformer]] :ivar dataset_language: :vartype dataset_language: str :ivar enable_dnn_featurization: :vartype enable_dnn_featurization: bool """ _attribute_map = { 'mode': {'key': 'mode', 'type': 'str'}, 'blocked_transformers': {'key': 'blockedTransformers', 'type': '[str]'}, 'column_purposes': {'key': 'columnPurposes', 'type': '{str}'}, 'drop_columns': {'key': 'dropColumns', 'type': '[str]'}, 'transformer_params': {'key': 'transformerParams', 'type': '{[AetherColumnTransformer]}'}, 'dataset_language': {'key': 'datasetLanguage', 'type': 'str'}, 'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'}, } def __init__( self, *, mode: Optional[Union[str, "AetherFeaturizationMode"]] = None, blocked_transformers: Optional[List[str]] = None, column_purposes: Optional[Dict[str, str]] = None, drop_columns: Optional[List[str]] = None, transformer_params: Optional[Dict[str, List["AetherColumnTransformer"]]] = None, dataset_language: Optional[str] = None, enable_dnn_featurization: Optional[bool] = None, **kwargs ): """ :keyword mode: Possible values include: "Auto", "Custom", "Off". :paramtype mode: str or ~flow.models.AetherFeaturizationMode :keyword blocked_transformers: :paramtype blocked_transformers: list[str] :keyword column_purposes: Dictionary of :code:`<string>`. 
:paramtype column_purposes: dict[str, str] :keyword drop_columns: :paramtype drop_columns: list[str] :keyword transformer_params: Dictionary of <componentsΒ·1y90i4mΒ·schemasΒ·aetherfeaturizationsettingsΒ·propertiesΒ·transformerparamsΒ·additionalproperties>. :paramtype transformer_params: dict[str, list[~flow.models.AetherColumnTransformer]] :keyword dataset_language: :paramtype dataset_language: str :keyword enable_dnn_featurization: :paramtype enable_dnn_featurization: bool """ super(AetherFeaturizationSettings, self).__init__(**kwargs) self.mode = mode self.blocked_transformers = blocked_transformers self.column_purposes = column_purposes self.drop_columns = drop_columns self.transformer_params = transformer_params self.dataset_language = dataset_language self.enable_dnn_featurization = enable_dnn_featurization class AetherFileSystem(msrest.serialization.Model): """AetherFileSystem. :ivar connection: :vartype connection: str :ivar path: :vartype path: str """ _attribute_map = { 'connection': {'key': 'connection', 'type': 'str'}, 'path': {'key': 'path', 'type': 'str'}, } def __init__( self, *, connection: Optional[str] = None, path: Optional[str] = None, **kwargs ): """ :keyword connection: :paramtype connection: str :keyword path: :paramtype path: str """ super(AetherFileSystem, self).__init__(**kwargs) self.connection = connection self.path = path class AetherForecastHorizon(msrest.serialization.Model): """AetherForecastHorizon. :ivar mode: Possible values include: "Auto", "Custom". :vartype mode: str or ~flow.models.AetherForecastHorizonMode :ivar value: :vartype value: int """ _attribute_map = { 'mode': {'key': 'mode', 'type': 'str'}, 'value': {'key': 'value', 'type': 'int'}, } def __init__( self, *, mode: Optional[Union[str, "AetherForecastHorizonMode"]] = None, value: Optional[int] = None, **kwargs ): """ :keyword mode: Possible values include: "Auto", "Custom". 
:paramtype mode: str or ~flow.models.AetherForecastHorizonMode :keyword value: :paramtype value: int """ super(AetherForecastHorizon, self).__init__(**kwargs) self.mode = mode self.value = value class AetherForecastingSettings(msrest.serialization.Model): """AetherForecastingSettings. :ivar country_or_region_for_holidays: :vartype country_or_region_for_holidays: str :ivar time_column_name: :vartype time_column_name: str :ivar target_lags: :vartype target_lags: ~flow.models.AetherTargetLags :ivar target_rolling_window_size: :vartype target_rolling_window_size: ~flow.models.AetherTargetRollingWindowSize :ivar forecast_horizon: :vartype forecast_horizon: ~flow.models.AetherForecastHorizon :ivar time_series_id_column_names: :vartype time_series_id_column_names: list[str] :ivar frequency: :vartype frequency: str :ivar feature_lags: :vartype feature_lags: str :ivar seasonality: :vartype seasonality: ~flow.models.AetherSeasonality :ivar short_series_handling_config: Possible values include: "Auto", "Pad", "Drop". :vartype short_series_handling_config: str or ~flow.models.AetherShortSeriesHandlingConfiguration :ivar use_stl: Possible values include: "Season", "SeasonTrend". :vartype use_stl: str or ~flow.models.AetherUseStl :ivar target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean". 
:vartype target_aggregate_function: str or ~flow.models.AetherTargetAggregationFunction :ivar cv_step_size: :vartype cv_step_size: int :ivar features_unknown_at_forecast_time: :vartype features_unknown_at_forecast_time: list[str] """ _attribute_map = { 'country_or_region_for_holidays': {'key': 'countryOrRegionForHolidays', 'type': 'str'}, 'time_column_name': {'key': 'timeColumnName', 'type': 'str'}, 'target_lags': {'key': 'targetLags', 'type': 'AetherTargetLags'}, 'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'AetherTargetRollingWindowSize'}, 'forecast_horizon': {'key': 'forecastHorizon', 'type': 'AetherForecastHorizon'}, 'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'}, 'frequency': {'key': 'frequency', 'type': 'str'}, 'feature_lags': {'key': 'featureLags', 'type': 'str'}, 'seasonality': {'key': 'seasonality', 'type': 'AetherSeasonality'}, 'short_series_handling_config': {'key': 'shortSeriesHandlingConfig', 'type': 'str'}, 'use_stl': {'key': 'useStl', 'type': 'str'}, 'target_aggregate_function': {'key': 'targetAggregateFunction', 'type': 'str'}, 'cv_step_size': {'key': 'cvStepSize', 'type': 'int'}, 'features_unknown_at_forecast_time': {'key': 'featuresUnknownAtForecastTime', 'type': '[str]'}, } def __init__( self, *, country_or_region_for_holidays: Optional[str] = None, time_column_name: Optional[str] = None, target_lags: Optional["AetherTargetLags"] = None, target_rolling_window_size: Optional["AetherTargetRollingWindowSize"] = None, forecast_horizon: Optional["AetherForecastHorizon"] = None, time_series_id_column_names: Optional[List[str]] = None, frequency: Optional[str] = None, feature_lags: Optional[str] = None, seasonality: Optional["AetherSeasonality"] = None, short_series_handling_config: Optional[Union[str, "AetherShortSeriesHandlingConfiguration"]] = None, use_stl: Optional[Union[str, "AetherUseStl"]] = None, target_aggregate_function: Optional[Union[str, "AetherTargetAggregationFunction"]] = 
None, cv_step_size: Optional[int] = None, features_unknown_at_forecast_time: Optional[List[str]] = None, **kwargs ): """ :keyword country_or_region_for_holidays: :paramtype country_or_region_for_holidays: str :keyword time_column_name: :paramtype time_column_name: str :keyword target_lags: :paramtype target_lags: ~flow.models.AetherTargetLags :keyword target_rolling_window_size: :paramtype target_rolling_window_size: ~flow.models.AetherTargetRollingWindowSize :keyword forecast_horizon: :paramtype forecast_horizon: ~flow.models.AetherForecastHorizon :keyword time_series_id_column_names: :paramtype time_series_id_column_names: list[str] :keyword frequency: :paramtype frequency: str :keyword feature_lags: :paramtype feature_lags: str :keyword seasonality: :paramtype seasonality: ~flow.models.AetherSeasonality :keyword short_series_handling_config: Possible values include: "Auto", "Pad", "Drop". :paramtype short_series_handling_config: str or ~flow.models.AetherShortSeriesHandlingConfiguration :keyword use_stl: Possible values include: "Season", "SeasonTrend". :paramtype use_stl: str or ~flow.models.AetherUseStl :keyword target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean". 
:paramtype target_aggregate_function: str or ~flow.models.AetherTargetAggregationFunction :keyword cv_step_size: :paramtype cv_step_size: int :keyword features_unknown_at_forecast_time: :paramtype features_unknown_at_forecast_time: list[str] """ super(AetherForecastingSettings, self).__init__(**kwargs) self.country_or_region_for_holidays = country_or_region_for_holidays self.time_column_name = time_column_name self.target_lags = target_lags self.target_rolling_window_size = target_rolling_window_size self.forecast_horizon = forecast_horizon self.time_series_id_column_names = time_series_id_column_names self.frequency = frequency self.feature_lags = feature_lags self.seasonality = seasonality self.short_series_handling_config = short_series_handling_config self.use_stl = use_stl self.target_aggregate_function = target_aggregate_function self.cv_step_size = cv_step_size self.features_unknown_at_forecast_time = features_unknown_at_forecast_time class AetherGeneralSettings(msrest.serialization.Model): """AetherGeneralSettings. :ivar primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError", "NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou". :vartype primary_metric: str or ~flow.models.AetherPrimaryMetrics :ivar task_type: Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER", "TextClassificationMultilabel". :vartype task_type: str or ~flow.models.AetherTaskType :ivar log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error", "Critical". 
:vartype log_verbosity: str or ~flow.models.AetherLogVerbosity """ _attribute_map = { 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, 'task_type': {'key': 'taskType', 'type': 'str'}, 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, } def __init__( self, *, primary_metric: Optional[Union[str, "AetherPrimaryMetrics"]] = None, task_type: Optional[Union[str, "AetherTaskType"]] = None, log_verbosity: Optional[Union[str, "AetherLogVerbosity"]] = None, **kwargs ): """ :keyword primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError", "NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou". :paramtype primary_metric: str or ~flow.models.AetherPrimaryMetrics :keyword task_type: Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER", "TextClassificationMultilabel". :paramtype task_type: str or ~flow.models.AetherTaskType :keyword log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error", "Critical". :paramtype log_verbosity: str or ~flow.models.AetherLogVerbosity """ super(AetherGeneralSettings, self).__init__(**kwargs) self.primary_metric = primary_metric self.task_type = task_type self.log_verbosity = log_verbosity class AetherGlobsOptions(msrest.serialization.Model): """AetherGlobsOptions. 
    :ivar glob_patterns:
    :vartype glob_patterns: list[str]
    """

    # Maps each attribute to its camelCase wire key and msrest type for (de)serialization.
    _attribute_map = {
        'glob_patterns': {'key': 'globPatterns', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        glob_patterns: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword glob_patterns:
        :paramtype glob_patterns: list[str]
        """
        super(AetherGlobsOptions, self).__init__(**kwargs)
        self.glob_patterns = glob_patterns


class AetherGraphControlNode(msrest.serialization.Model):
    """AetherGraphControlNode.

    :ivar id:
    :vartype id: str
    :ivar control_type: The only acceptable values to pass in are None and "IfElse". The default
     value is None.
    :vartype control_type: str
    :ivar control_parameter:
    :vartype control_parameter: ~flow.models.AetherParameterAssignment
    :ivar run_attribution:
    :vartype run_attribution: str
    """

    # Wire key/type mapping consumed by msrest serialization.
    # NOTE: the 'id' attribute deliberately mirrors the REST field name and shadows the builtin
    # (generated code convention).
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'control_type': {'key': 'controlType', 'type': 'str'},
        'control_parameter': {'key': 'controlParameter', 'type': 'AetherParameterAssignment'},
        'run_attribution': {'key': 'runAttribution', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,
        control_type: Optional[str] = None,
        control_parameter: Optional["AetherParameterAssignment"] = None,
        run_attribution: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword control_type: The only acceptable values to pass in are None and "IfElse". The
         default value is None.
        :paramtype control_type: str
        :keyword control_parameter:
        :paramtype control_parameter: ~flow.models.AetherParameterAssignment
        :keyword run_attribution:
        :paramtype run_attribution: str
        """
        super(AetherGraphControlNode, self).__init__(**kwargs)
        self.id = id
        self.control_type = control_type
        self.control_parameter = control_parameter
        self.run_attribution = run_attribution


class AetherGraphControlReferenceNode(msrest.serialization.Model):
    """AetherGraphControlReferenceNode.

    :ivar id:
    :vartype id: str
    :ivar name:
    :vartype name: str
    :ivar comment:
    :vartype comment: str
    :ivar control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
    :vartype control_flow_type: str or ~flow.models.AetherControlFlowType
    :ivar reference_node_id:
    :vartype reference_node_id: str
    :ivar do_while_control_flow_info:
    :vartype do_while_control_flow_info: ~flow.models.AetherDoWhileControlFlowInfo
    :ivar parallel_for_control_flow_info:
    :vartype parallel_for_control_flow_info: ~flow.models.AetherParallelForControlFlowInfo
    :ivar run_attribution:
    :vartype run_attribution: str
    """

    # Wire key/type mapping consumed by msrest serialization.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'comment': {'key': 'comment', 'type': 'str'},
        'control_flow_type': {'key': 'controlFlowType', 'type': 'str'},
        'reference_node_id': {'key': 'referenceNodeId', 'type': 'str'},
        'do_while_control_flow_info': {'key': 'doWhileControlFlowInfo', 'type': 'AetherDoWhileControlFlowInfo'},
        'parallel_for_control_flow_info': {'key': 'parallelForControlFlowInfo', 'type': 'AetherParallelForControlFlowInfo'},
        'run_attribution': {'key': 'runAttribution', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,
        name: Optional[str] = None,
        comment: Optional[str] = None,
        control_flow_type: Optional[Union[str, "AetherControlFlowType"]] = None,
        reference_node_id: Optional[str] = None,
        do_while_control_flow_info: Optional["AetherDoWhileControlFlowInfo"] = None,
        parallel_for_control_flow_info: Optional["AetherParallelForControlFlowInfo"] = None,
        run_attribution: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword name:
        :paramtype name: str
        :keyword comment:
        :paramtype comment: str
        :keyword control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
        :paramtype control_flow_type: str or ~flow.models.AetherControlFlowType
        :keyword reference_node_id:
        :paramtype reference_node_id: str
        :keyword do_while_control_flow_info:
        :paramtype do_while_control_flow_info: ~flow.models.AetherDoWhileControlFlowInfo
        :keyword parallel_for_control_flow_info:
        :paramtype parallel_for_control_flow_info: ~flow.models.AetherParallelForControlFlowInfo
        :keyword run_attribution:
        :paramtype run_attribution: str
        """
        super(AetherGraphControlReferenceNode, self).__init__(**kwargs)
        self.id = id
        self.name = name
        self.comment = comment
        self.control_flow_type = control_flow_type
        self.reference_node_id = reference_node_id
        self.do_while_control_flow_info = do_while_control_flow_info
        self.parallel_for_control_flow_info = parallel_for_control_flow_info
        self.run_attribution = run_attribution


class AetherGraphDatasetNode(msrest.serialization.Model):
    """AetherGraphDatasetNode.

    :ivar id:
    :vartype id: str
    :ivar dataset_id:
    :vartype dataset_id: str
    :ivar data_path_parameter_name:
    :vartype data_path_parameter_name: str
    :ivar data_set_definition:
    :vartype data_set_definition: ~flow.models.AetherDataSetDefinition
    """

    # Wire key/type mapping consumed by msrest serialization.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'dataset_id': {'key': 'datasetId', 'type': 'str'},
        'data_path_parameter_name': {'key': 'dataPathParameterName', 'type': 'str'},
        'data_set_definition': {'key': 'dataSetDefinition', 'type': 'AetherDataSetDefinition'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,
        dataset_id: Optional[str] = None,
        data_path_parameter_name: Optional[str] = None,
        data_set_definition: Optional["AetherDataSetDefinition"] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword dataset_id:
        :paramtype dataset_id: str
        :keyword data_path_parameter_name:
        :paramtype data_path_parameter_name: str
        :keyword data_set_definition:
        :paramtype data_set_definition: ~flow.models.AetherDataSetDefinition
        """
        super(AetherGraphDatasetNode, self).__init__(**kwargs)
        self.id = id
        self.dataset_id = dataset_id
        self.data_path_parameter_name = data_path_parameter_name
        self.data_set_definition = data_set_definition


class AetherGraphEdge(msrest.serialization.Model):
    """AetherGraphEdge.

    :ivar source_output_port:
    :vartype source_output_port: ~flow.models.AetherPortInfo
    :ivar destination_input_port:
    :vartype destination_input_port: ~flow.models.AetherPortInfo
    """

    # Maps each attribute to its camelCase wire key and msrest type for (de)serialization.
    _attribute_map = {
        'source_output_port': {'key': 'sourceOutputPort', 'type': 'AetherPortInfo'},
        'destination_input_port': {'key': 'destinationInputPort', 'type': 'AetherPortInfo'},
    }

    def __init__(
        self,
        *,
        source_output_port: Optional["AetherPortInfo"] = None,
        destination_input_port: Optional["AetherPortInfo"] = None,
        **kwargs
    ):
        """
        :keyword source_output_port:
        :paramtype source_output_port: ~flow.models.AetherPortInfo
        :keyword destination_input_port:
        :paramtype destination_input_port: ~flow.models.AetherPortInfo
        """
        super(AetherGraphEdge, self).__init__(**kwargs)
        self.source_output_port = source_output_port
        self.destination_input_port = destination_input_port


class AetherGraphEntity(msrest.serialization.Model):
    """AetherGraphEntity.

    :ivar module_nodes:
    :vartype module_nodes: list[~flow.models.AetherGraphModuleNode]
    :ivar dataset_nodes:
    :vartype dataset_nodes: list[~flow.models.AetherGraphDatasetNode]
    :ivar sub_graph_nodes:
    :vartype sub_graph_nodes: list[~flow.models.AetherGraphReferenceNode]
    :ivar control_reference_nodes:
    :vartype control_reference_nodes: list[~flow.models.AetherGraphControlReferenceNode]
    :ivar control_nodes:
    :vartype control_nodes: list[~flow.models.AetherGraphControlNode]
    :ivar edges:
    :vartype edges: list[~flow.models.AetherGraphEdge]
    :ivar default_compute:
    :vartype default_compute: ~flow.models.AetherComputeSetting
    :ivar default_datastore:
    :vartype default_datastore: ~flow.models.AetherDatastoreSetting
    :ivar default_cloud_priority:
    :vartype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
    :ivar parent_sub_graph_module_ids:
    :vartype parent_sub_graph_module_ids: list[str]
    :ivar id:
    :vartype id: str
    :ivar workspace_id:
    :vartype workspace_id: str
    :ivar etag:
    :vartype etag: str
    :ivar tags: A set of tags.
    :vartype tags: list[str]
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.AetherEntityStatus
    """

    # Wire key/type mapping consumed by msrest serialization; dates use iso-8601 on the wire.
    _attribute_map = {
        'module_nodes': {'key': 'moduleNodes', 'type': '[AetherGraphModuleNode]'},
        'dataset_nodes': {'key': 'datasetNodes', 'type': '[AetherGraphDatasetNode]'},
        'sub_graph_nodes': {'key': 'subGraphNodes', 'type': '[AetherGraphReferenceNode]'},
        'control_reference_nodes': {'key': 'controlReferenceNodes', 'type': '[AetherGraphControlReferenceNode]'},
        'control_nodes': {'key': 'controlNodes', 'type': '[AetherGraphControlNode]'},
        'edges': {'key': 'edges', 'type': '[AetherGraphEdge]'},
        'default_compute': {'key': 'defaultCompute', 'type': 'AetherComputeSetting'},
        'default_datastore': {'key': 'defaultDatastore', 'type': 'AetherDatastoreSetting'},
        'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'AetherCloudPrioritySetting'},
        'parent_sub_graph_module_ids': {'key': 'parentSubGraphModuleIds', 'type': '[str]'},
        'id': {'key': 'id', 'type': 'str'},
        'workspace_id': {'key': 'workspaceId', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '[str]'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        module_nodes: Optional[List["AetherGraphModuleNode"]] = None,
        dataset_nodes: Optional[List["AetherGraphDatasetNode"]] = None,
        sub_graph_nodes: Optional[List["AetherGraphReferenceNode"]] = None,
        control_reference_nodes: Optional[List["AetherGraphControlReferenceNode"]] = None,
        control_nodes: Optional[List["AetherGraphControlNode"]] = None,
        edges: Optional[List["AetherGraphEdge"]] = None,
        default_compute: Optional["AetherComputeSetting"] = None,
        default_datastore: Optional["AetherDatastoreSetting"] = None,
        default_cloud_priority: Optional["AetherCloudPrioritySetting"] = None,
        parent_sub_graph_module_ids: Optional[List[str]] = None,
        id: Optional[str] = None,
        workspace_id: Optional[str] = None,
        etag: Optional[str] = None,
        tags: Optional[List[str]] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        entity_status: Optional[Union[str, "AetherEntityStatus"]] = None,
        **kwargs
    ):
        """
        :keyword module_nodes:
        :paramtype module_nodes: list[~flow.models.AetherGraphModuleNode]
        :keyword dataset_nodes:
        :paramtype dataset_nodes: list[~flow.models.AetherGraphDatasetNode]
        :keyword sub_graph_nodes:
        :paramtype sub_graph_nodes: list[~flow.models.AetherGraphReferenceNode]
        :keyword control_reference_nodes:
        :paramtype control_reference_nodes: list[~flow.models.AetherGraphControlReferenceNode]
        :keyword control_nodes:
        :paramtype control_nodes: list[~flow.models.AetherGraphControlNode]
        :keyword edges:
        :paramtype edges: list[~flow.models.AetherGraphEdge]
        :keyword default_compute:
        :paramtype default_compute: ~flow.models.AetherComputeSetting
        :keyword default_datastore:
        :paramtype default_datastore: ~flow.models.AetherDatastoreSetting
        :keyword default_cloud_priority:
        :paramtype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
        :keyword parent_sub_graph_module_ids:
        :paramtype parent_sub_graph_module_ids: list[str]
        :keyword id:
        :paramtype id: str
        :keyword workspace_id:
        :paramtype workspace_id: str
        :keyword etag:
        :paramtype etag: str
        :keyword tags: A set of tags.
        :paramtype tags: list[str]
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.AetherEntityStatus
        """
        super(AetherGraphEntity, self).__init__(**kwargs)
        self.module_nodes = module_nodes
        self.dataset_nodes = dataset_nodes
        self.sub_graph_nodes = sub_graph_nodes
        self.control_reference_nodes = control_reference_nodes
        self.control_nodes = control_nodes
        self.edges = edges
        self.default_compute = default_compute
        self.default_datastore = default_datastore
        self.default_cloud_priority = default_cloud_priority
        self.parent_sub_graph_module_ids = parent_sub_graph_module_ids
        self.id = id
        self.workspace_id = workspace_id
        self.etag = etag
        self.tags = tags
        self.created_date = created_date
        self.last_modified_date = last_modified_date
        self.entity_status = entity_status


class AetherGraphModuleNode(msrest.serialization.Model):
    """AetherGraphModuleNode.

    :ivar cloud_priority:
    :vartype cloud_priority: int
    :ivar default_data_retention_hint:
    :vartype default_data_retention_hint: int
    :ivar compliance_cluster:
    :vartype compliance_cluster: str
    :ivar euclid_workspace_id:
    :vartype euclid_workspace_id: str
    :ivar attached_modules:
    :vartype attached_modules: list[str]
    :ivar acceptable_machine_clusters:
    :vartype acceptable_machine_clusters: list[str]
    :ivar custom_data_location_id:
    :vartype custom_data_location_id: str
    :ivar alert_timeout_duration:
    :vartype alert_timeout_duration: str
    :ivar runconfig:
    :vartype runconfig: str
    :ivar id:
    :vartype id: str
    :ivar module_id:
    :vartype module_id: str
    :ivar comment:
    :vartype comment: str
    :ivar name:
    :vartype name: str
    :ivar module_parameters:
    :vartype module_parameters: list[~flow.models.AetherParameterAssignment]
    :ivar module_metadata_parameters:
    :vartype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
    :ivar module_output_settings:
    :vartype module_output_settings: list[~flow.models.AetherOutputSetting]
    :ivar module_input_settings:
    :vartype module_input_settings: list[~flow.models.AetherInputSetting]
    :ivar use_graph_default_compute:
    :vartype use_graph_default_compute: bool
    :ivar use_graph_default_datastore:
    :vartype use_graph_default_datastore: bool
    :ivar regenerate_output:
    :vartype regenerate_output: bool
    :ivar control_inputs:
    :vartype control_inputs: list[~flow.models.AetherControlInput]
    :ivar cloud_settings:
    :vartype cloud_settings: ~flow.models.AetherCloudSettings
    :ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
    :vartype execution_phase: str or ~flow.models.AetherExecutionPhase
    :ivar run_attribution:
    :vartype run_attribution: str
    """

    # Wire key/type mapping consumed by msrest serialization.
    _attribute_map = {
        'cloud_priority': {'key': 'cloudPriority', 'type': 'int'},
        'default_data_retention_hint': {'key': 'defaultDataRetentionHint', 'type': 'int'},
        'compliance_cluster': {'key': 'complianceCluster', 'type': 'str'},
        'euclid_workspace_id': {'key': 'euclidWorkspaceId', 'type': 'str'},
        'attached_modules': {'key': 'attachedModules', 'type': '[str]'},
        'acceptable_machine_clusters': {'key': 'acceptableMachineClusters', 'type': '[str]'},
        'custom_data_location_id': {'key': 'customDataLocationId', 'type': 'str'},
        'alert_timeout_duration': {'key': 'alertTimeoutDuration', 'type': 'str'},
        'runconfig': {'key': 'runconfig', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'module_id': {'key': 'moduleId', 'type': 'str'},
        'comment': {'key': 'comment', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'module_parameters': {'key': 'moduleParameters', 'type': '[AetherParameterAssignment]'},
        'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[AetherParameterAssignment]'},
        'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[AetherOutputSetting]'},
        'module_input_settings': {'key': 'moduleInputSettings', 'type': '[AetherInputSetting]'},
        'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
        'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
        'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
        'control_inputs': {'key': 'controlInputs', 'type': '[AetherControlInput]'},
        'cloud_settings': {'key': 'cloudSettings', 'type': 'AetherCloudSettings'},
        'execution_phase': {'key': 'executionPhase', 'type': 'str'},
        'run_attribution': {'key': 'runAttribution', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        cloud_priority: Optional[int] = None,
        default_data_retention_hint: Optional[int] = None,
        compliance_cluster: Optional[str] = None,
        euclid_workspace_id: Optional[str] = None,
        attached_modules: Optional[List[str]] = None,
        acceptable_machine_clusters: Optional[List[str]] = None,
        custom_data_location_id: Optional[str] = None,
        alert_timeout_duration: Optional[str] = None,
        runconfig: Optional[str] = None,
        id: Optional[str] = None,
        module_id: Optional[str] = None,
        comment: Optional[str] = None,
        name: Optional[str] = None,
        module_parameters: Optional[List["AetherParameterAssignment"]] = None,
        module_metadata_parameters: Optional[List["AetherParameterAssignment"]] = None,
        module_output_settings: Optional[List["AetherOutputSetting"]] = None,
        module_input_settings: Optional[List["AetherInputSetting"]] = None,
        use_graph_default_compute: Optional[bool] = None,
        use_graph_default_datastore: Optional[bool] = None,
        regenerate_output: Optional[bool] = None,
        control_inputs: Optional[List["AetherControlInput"]] = None,
        cloud_settings: Optional["AetherCloudSettings"] = None,
        execution_phase: Optional[Union[str, "AetherExecutionPhase"]] = None,
        run_attribution: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword cloud_priority:
        :paramtype cloud_priority: int
        :keyword default_data_retention_hint:
        :paramtype default_data_retention_hint: int
        :keyword compliance_cluster:
        :paramtype compliance_cluster: str
        :keyword euclid_workspace_id:
        :paramtype euclid_workspace_id: str
        :keyword attached_modules:
        :paramtype attached_modules: list[str]
        :keyword acceptable_machine_clusters:
        :paramtype acceptable_machine_clusters: list[str]
        :keyword custom_data_location_id:
        :paramtype custom_data_location_id: str
        :keyword alert_timeout_duration:
        :paramtype alert_timeout_duration: str
        :keyword runconfig:
        :paramtype runconfig: str
        :keyword id:
        :paramtype id: str
        :keyword module_id:
        :paramtype module_id: str
        :keyword comment:
        :paramtype comment: str
        :keyword name:
        :paramtype name: str
        :keyword module_parameters:
        :paramtype module_parameters: list[~flow.models.AetherParameterAssignment]
        :keyword module_metadata_parameters:
        :paramtype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
        :keyword module_output_settings:
        :paramtype module_output_settings: list[~flow.models.AetherOutputSetting]
        :keyword module_input_settings:
        :paramtype module_input_settings: list[~flow.models.AetherInputSetting]
        :keyword use_graph_default_compute:
        :paramtype use_graph_default_compute: bool
        :keyword use_graph_default_datastore:
        :paramtype use_graph_default_datastore: bool
        :keyword regenerate_output:
        :paramtype regenerate_output: bool
        :keyword control_inputs:
        :paramtype control_inputs: list[~flow.models.AetherControlInput]
        :keyword cloud_settings:
        :paramtype cloud_settings: ~flow.models.AetherCloudSettings
        :keyword execution_phase: Possible values include: "Execution", "Initialization",
         "Finalization".
        :paramtype execution_phase: str or ~flow.models.AetherExecutionPhase
        :keyword run_attribution:
        :paramtype run_attribution: str
        """
        super(AetherGraphModuleNode, self).__init__(**kwargs)
        self.cloud_priority = cloud_priority
        self.default_data_retention_hint = default_data_retention_hint
        self.compliance_cluster = compliance_cluster
        self.euclid_workspace_id = euclid_workspace_id
        self.attached_modules = attached_modules
        self.acceptable_machine_clusters = acceptable_machine_clusters
        self.custom_data_location_id = custom_data_location_id
        self.alert_timeout_duration = alert_timeout_duration
        self.runconfig = runconfig
        self.id = id
        self.module_id = module_id
        self.comment = comment
        self.name = name
        self.module_parameters = module_parameters
        self.module_metadata_parameters = module_metadata_parameters
        self.module_output_settings = module_output_settings
        self.module_input_settings = module_input_settings
        self.use_graph_default_compute = use_graph_default_compute
        self.use_graph_default_datastore = use_graph_default_datastore
        self.regenerate_output = regenerate_output
        self.control_inputs = control_inputs
        self.cloud_settings = cloud_settings
        self.execution_phase = execution_phase
        self.run_attribution = run_attribution


class AetherGraphReferenceNode(msrest.serialization.Model):
    """AetherGraphReferenceNode.

    :ivar graph_id:
    :vartype graph_id: str
    :ivar default_compute:
    :vartype default_compute: ~flow.models.AetherComputeSetting
    :ivar default_datastore:
    :vartype default_datastore: ~flow.models.AetherDatastoreSetting
    :ivar id:
    :vartype id: str
    :ivar module_id:
    :vartype module_id: str
    :ivar comment:
    :vartype comment: str
    :ivar name:
    :vartype name: str
    :ivar module_parameters:
    :vartype module_parameters: list[~flow.models.AetherParameterAssignment]
    :ivar module_metadata_parameters:
    :vartype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
    :ivar module_output_settings:
    :vartype module_output_settings: list[~flow.models.AetherOutputSetting]
    :ivar module_input_settings:
    :vartype module_input_settings: list[~flow.models.AetherInputSetting]
    :ivar use_graph_default_compute:
    :vartype use_graph_default_compute: bool
    :ivar use_graph_default_datastore:
    :vartype use_graph_default_datastore: bool
    :ivar regenerate_output:
    :vartype regenerate_output: bool
    :ivar control_inputs:
    :vartype control_inputs: list[~flow.models.AetherControlInput]
    :ivar cloud_settings:
    :vartype cloud_settings: ~flow.models.AetherCloudSettings
    :ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
    :vartype execution_phase: str or ~flow.models.AetherExecutionPhase
    :ivar run_attribution:
    :vartype run_attribution: str
    """

    # Wire key/type mapping consumed by msrest serialization.
    _attribute_map = {
        'graph_id': {'key': 'graphId', 'type': 'str'},
        'default_compute': {'key': 'defaultCompute', 'type': 'AetherComputeSetting'},
        'default_datastore': {'key': 'defaultDatastore', 'type': 'AetherDatastoreSetting'},
        'id': {'key': 'id', 'type': 'str'},
        'module_id': {'key': 'moduleId', 'type': 'str'},
        'comment': {'key': 'comment', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'module_parameters': {'key': 'moduleParameters', 'type': '[AetherParameterAssignment]'},
        'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[AetherParameterAssignment]'},
        'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[AetherOutputSetting]'},
        'module_input_settings': {'key': 'moduleInputSettings', 'type': '[AetherInputSetting]'},
        'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
        'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
        'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
        'control_inputs': {'key': 'controlInputs', 'type': '[AetherControlInput]'},
        'cloud_settings': {'key': 'cloudSettings', 'type': 'AetherCloudSettings'},
        'execution_phase': {'key': 'executionPhase', 'type': 'str'},
        'run_attribution': {'key': 'runAttribution', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        graph_id: Optional[str] = None,
        default_compute: Optional["AetherComputeSetting"] = None,
        default_datastore: Optional["AetherDatastoreSetting"] = None,
        id: Optional[str] = None,
        module_id: Optional[str] = None,
        comment: Optional[str] = None,
        name: Optional[str] = None,
        module_parameters: Optional[List["AetherParameterAssignment"]] = None,
        module_metadata_parameters: Optional[List["AetherParameterAssignment"]] = None,
        module_output_settings: Optional[List["AetherOutputSetting"]] = None,
        module_input_settings: Optional[List["AetherInputSetting"]] = None,
        use_graph_default_compute: Optional[bool] = None,
        use_graph_default_datastore: Optional[bool] = None,
        regenerate_output: Optional[bool] = None,
        control_inputs: Optional[List["AetherControlInput"]] = None,
        cloud_settings: Optional["AetherCloudSettings"] = None,
        execution_phase: Optional[Union[str, "AetherExecutionPhase"]] = None,
        run_attribution: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword graph_id:
        :paramtype graph_id: str
        :keyword default_compute:
        :paramtype default_compute: ~flow.models.AetherComputeSetting
        :keyword default_datastore:
        :paramtype default_datastore: ~flow.models.AetherDatastoreSetting
        :keyword id:
        :paramtype id: str
        :keyword module_id:
        :paramtype module_id: str
        :keyword comment:
        :paramtype comment: str
        :keyword name:
        :paramtype name: str
        :keyword module_parameters:
        :paramtype module_parameters: list[~flow.models.AetherParameterAssignment]
        :keyword module_metadata_parameters:
        :paramtype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
        :keyword module_output_settings:
        :paramtype module_output_settings: list[~flow.models.AetherOutputSetting]
        :keyword module_input_settings:
        :paramtype module_input_settings: list[~flow.models.AetherInputSetting]
        :keyword use_graph_default_compute:
        :paramtype use_graph_default_compute: bool
        :keyword use_graph_default_datastore:
        :paramtype use_graph_default_datastore: bool
        :keyword regenerate_output:
        :paramtype regenerate_output: bool
        :keyword control_inputs:
        :paramtype control_inputs: list[~flow.models.AetherControlInput]
        :keyword cloud_settings:
        :paramtype cloud_settings: ~flow.models.AetherCloudSettings
        :keyword execution_phase: Possible values include: "Execution", "Initialization",
         "Finalization".
        :paramtype execution_phase: str or ~flow.models.AetherExecutionPhase
        :keyword run_attribution:
        :paramtype run_attribution: str
        """
        super(AetherGraphReferenceNode, self).__init__(**kwargs)
        self.graph_id = graph_id
        self.default_compute = default_compute
        self.default_datastore = default_datastore
        self.id = id
        self.module_id = module_id
        self.comment = comment
        self.name = name
        self.module_parameters = module_parameters
        self.module_metadata_parameters = module_metadata_parameters
        self.module_output_settings = module_output_settings
        self.module_input_settings = module_input_settings
        self.use_graph_default_compute = use_graph_default_compute
        self.use_graph_default_datastore = use_graph_default_datastore
        self.regenerate_output = regenerate_output
        self.control_inputs = control_inputs
        self.cloud_settings = cloud_settings
        self.execution_phase = execution_phase
        self.run_attribution = run_attribution


class AetherHdfsReference(msrest.serialization.Model):
    """AetherHdfsReference.

    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    :ivar relative_path:
    :vartype relative_path: str
    """

    # Wire key/type mapping consumed by msrest serialization.
    _attribute_map = {
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        aml_data_store_name: Optional[str] = None,
        relative_path: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        :keyword relative_path:
        :paramtype relative_path: str
        """
        super(AetherHdfsReference, self).__init__(**kwargs)
        self.aml_data_store_name = aml_data_store_name
        self.relative_path = relative_path


class AetherHdiClusterComputeInfo(msrest.serialization.Model):
    """AetherHdiClusterComputeInfo.
    :ivar address:
    :vartype address: str
    :ivar username:
    :vartype username: str
    :ivar password:
    :vartype password: str
    :ivar private_key:
    :vartype private_key: str
    """

    # Wire key/type mapping consumed by msrest serialization.
    # NOTE(review): 'password' and 'private_key' are plain-text credential fields on the wire
    # contract of the generated client — handle values with care when logging.
    _attribute_map = {
        'address': {'key': 'address', 'type': 'str'},
        'username': {'key': 'username', 'type': 'str'},
        'password': {'key': 'password', 'type': 'str'},
        'private_key': {'key': 'privateKey', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        address: Optional[str] = None,
        username: Optional[str] = None,
        password: Optional[str] = None,
        private_key: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword address:
        :paramtype address: str
        :keyword username:
        :paramtype username: str
        :keyword password:
        :paramtype password: str
        :keyword private_key:
        :paramtype private_key: str
        """
        super(AetherHdiClusterComputeInfo, self).__init__(**kwargs)
        self.address = address
        self.username = username
        self.password = password
        self.private_key = private_key


class AetherHdiRunConfiguration(msrest.serialization.Model):
    """AetherHdiRunConfiguration.

    :ivar file:
    :vartype file: str
    :ivar class_name:
    :vartype class_name: str
    :ivar files:
    :vartype files: list[str]
    :ivar archives:
    :vartype archives: list[str]
    :ivar jars:
    :vartype jars: list[str]
    :ivar py_files:
    :vartype py_files: list[str]
    :ivar compute_name:
    :vartype compute_name: str
    :ivar queue:
    :vartype queue: str
    :ivar driver_memory:
    :vartype driver_memory: str
    :ivar driver_cores:
    :vartype driver_cores: int
    :ivar executor_memory:
    :vartype executor_memory: str
    :ivar executor_cores:
    :vartype executor_cores: int
    :ivar number_executors:
    :vartype number_executors: int
    :ivar conf: Dictionary of :code:`<string>`.
    :vartype conf: dict[str, str]
    :ivar name:
    :vartype name: str
    """

    # Wire key/type mapping consumed by msrest serialization ('{str}' is a string->string map).
    _attribute_map = {
        'file': {'key': 'file', 'type': 'str'},
        'class_name': {'key': 'className', 'type': 'str'},
        'files': {'key': 'files', 'type': '[str]'},
        'archives': {'key': 'archives', 'type': '[str]'},
        'jars': {'key': 'jars', 'type': '[str]'},
        'py_files': {'key': 'pyFiles', 'type': '[str]'},
        'compute_name': {'key': 'computeName', 'type': 'str'},
        'queue': {'key': 'queue', 'type': 'str'},
        'driver_memory': {'key': 'driverMemory', 'type': 'str'},
        'driver_cores': {'key': 'driverCores', 'type': 'int'},
        'executor_memory': {'key': 'executorMemory', 'type': 'str'},
        'executor_cores': {'key': 'executorCores', 'type': 'int'},
        'number_executors': {'key': 'numberExecutors', 'type': 'int'},
        'conf': {'key': 'conf', 'type': '{str}'},
        'name': {'key': 'name', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        file: Optional[str] = None,
        class_name: Optional[str] = None,
        files: Optional[List[str]] = None,
        archives: Optional[List[str]] = None,
        jars: Optional[List[str]] = None,
        py_files: Optional[List[str]] = None,
        compute_name: Optional[str] = None,
        queue: Optional[str] = None,
        driver_memory: Optional[str] = None,
        driver_cores: Optional[int] = None,
        executor_memory: Optional[str] = None,
        executor_cores: Optional[int] = None,
        number_executors: Optional[int] = None,
        conf: Optional[Dict[str, str]] = None,
        name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword file:
        :paramtype file: str
        :keyword class_name:
        :paramtype class_name: str
        :keyword files:
        :paramtype files: list[str]
        :keyword archives:
        :paramtype archives: list[str]
        :keyword jars:
        :paramtype jars: list[str]
        :keyword py_files:
        :paramtype py_files: list[str]
        :keyword compute_name:
        :paramtype compute_name: str
        :keyword queue:
        :paramtype queue: str
        :keyword driver_memory:
        :paramtype driver_memory: str
        :keyword driver_cores:
        :paramtype driver_cores: int
        :keyword executor_memory:
        :paramtype executor_memory: str
        :keyword executor_cores:
        :paramtype executor_cores: int
        :keyword number_executors:
        :paramtype number_executors: int
        :keyword conf: Dictionary of :code:`<string>`.
        :paramtype conf: dict[str, str]
        :keyword name:
        :paramtype name: str
        """
        super(AetherHdiRunConfiguration, self).__init__(**kwargs)
        self.file = file
        self.class_name = class_name
        self.files = files
        self.archives = archives
        self.jars = jars
        self.py_files = py_files
        self.compute_name = compute_name
        self.queue = queue
        self.driver_memory = driver_memory
        self.driver_cores = driver_cores
        self.executor_memory = executor_memory
        self.executor_cores = executor_cores
        self.number_executors = number_executors
        self.conf = conf
        self.name = name


class AetherHyperDriveConfiguration(msrest.serialization.Model):
    """AetherHyperDriveConfiguration.

    :ivar hyper_drive_run_config:
    :vartype hyper_drive_run_config: str
    :ivar primary_metric_goal:
    :vartype primary_metric_goal: str
    :ivar primary_metric_name:
    :vartype primary_metric_name: str
    :ivar arguments:
    :vartype arguments: list[~flow.models.AetherArgumentAssignment]
    """

    # Wire key/type mapping consumed by msrest serialization.
    _attribute_map = {
        'hyper_drive_run_config': {'key': 'hyperDriveRunConfig', 'type': 'str'},
        'primary_metric_goal': {'key': 'primaryMetricGoal', 'type': 'str'},
        'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
        'arguments': {'key': 'arguments', 'type': '[AetherArgumentAssignment]'},
    }

    def __init__(
        self,
        *,
        hyper_drive_run_config: Optional[str] = None,
        primary_metric_goal: Optional[str] = None,
        primary_metric_name: Optional[str] = None,
        arguments: Optional[List["AetherArgumentAssignment"]] = None,
        **kwargs
    ):
        """
        :keyword hyper_drive_run_config:
        :paramtype hyper_drive_run_config: str
        :keyword primary_metric_goal:
        :paramtype primary_metric_goal: str
        :keyword primary_metric_name:
        :paramtype primary_metric_name: str
        :keyword arguments:
        :paramtype arguments: list[~flow.models.AetherArgumentAssignment]
        """
        super(AetherHyperDriveConfiguration, self).__init__(**kwargs)
        self.hyper_drive_run_config = hyper_drive_run_config
        self.primary_metric_goal = primary_metric_goal
        self.primary_metric_name = primary_metric_name
        self.arguments = arguments


class AetherIdentitySetting(msrest.serialization.Model):
    """AetherIdentitySetting.

    :ivar type: Possible values include: "UserIdentity", "Managed", "AMLToken".
    :vartype type: str or ~flow.models.AetherIdentityType
    :ivar client_id:
    :vartype client_id: str
    :ivar object_id:
    :vartype object_id: str
    :ivar msi_resource_id:
    :vartype msi_resource_id: str
    """

    # Wire key/type mapping consumed by msrest serialization.
    # NOTE: the 'type' attribute deliberately mirrors the REST field name and shadows the builtin
    # (generated code convention).
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'client_id': {'key': 'clientId', 'type': 'str'},
        'object_id': {'key': 'objectId', 'type': 'str'},
        'msi_resource_id': {'key': 'msiResourceId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        type: Optional[Union[str, "AetherIdentityType"]] = None,
        client_id: Optional[str] = None,
        object_id: Optional[str] = None,
        msi_resource_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword type: Possible values include: "UserIdentity", "Managed", "AMLToken".
        :paramtype type: str or ~flow.models.AetherIdentityType
        :keyword client_id:
        :paramtype client_id: str
        :keyword object_id:
        :paramtype object_id: str
        :keyword msi_resource_id:
        :paramtype msi_resource_id: str
        """
        super(AetherIdentitySetting, self).__init__(**kwargs)
        self.type = type
        self.client_id = client_id
        self.object_id = object_id
        self.msi_resource_id = msi_resource_id


class AetherImportDataTask(msrest.serialization.Model):
    """AetherImportDataTask.

    :ivar data_transfer_source:
    :vartype data_transfer_source: ~flow.models.AetherDataTransferSource
    """

    # Wire key/type mapping consumed by msrest serialization.
    # NOTE: the wire key 'DataTransferSource' is PascalCase (not camelCase) per the service contract.
    _attribute_map = {
        'data_transfer_source': {'key': 'DataTransferSource', 'type': 'AetherDataTransferSource'},
    }

    def __init__(
        self,
        *,
        data_transfer_source: Optional["AetherDataTransferSource"] = None,
        **kwargs
    ):
        """
        :keyword data_transfer_source:
        :paramtype data_transfer_source: ~flow.models.AetherDataTransferSource
        """
        super(AetherImportDataTask, self).__init__(**kwargs)
        self.data_transfer_source = data_transfer_source


class AetherInputSetting(msrest.serialization.Model):
    """AetherInputSetting.

    :ivar name:
    :vartype name: str
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar options: This is a dictionary.
    :vartype options: dict[str, str]
    :ivar additional_transformations:
    :vartype additional_transformations: str
    """

    # Wire key/type mapping consumed by msrest serialization.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'options': {'key': 'options', 'type': '{str}'},
        'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        data_store_mode: Optional[Union[str, "AetherDataStoreMode"]] = None,
        path_on_compute: Optional[str] = None,
        options: Optional[Dict[str, str]] = None,
        additional_transformations: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword options: This is a dictionary.
        :paramtype options: dict[str, str]
        :keyword additional_transformations:
        :paramtype additional_transformations: str
        """
        super(AetherInputSetting, self).__init__(**kwargs)
        self.name = name
        self.data_store_mode = data_store_mode
        self.path_on_compute = path_on_compute
        self.options = options
        self.additional_transformations = additional_transformations


class AetherInteractiveConfig(msrest.serialization.Model):
    """AetherInteractiveConfig.

    :ivar is_ssh_enabled:
    :vartype is_ssh_enabled: bool
    :ivar ssh_public_key:
    :vartype ssh_public_key: str
    :ivar is_i_python_enabled:
    :vartype is_i_python_enabled: bool
    :ivar is_tensor_board_enabled:
    :vartype is_tensor_board_enabled: bool
    :ivar interactive_port:
    :vartype interactive_port: int
    """

    # Wire key/type mapping consumed by msrest serialization (note 'isSSHEnabled'/'isIPythonEnabled'
    # acronym casing on the wire).
    _attribute_map = {
        'is_ssh_enabled': {'key': 'isSSHEnabled', 'type': 'bool'},
        'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
        'is_i_python_enabled': {'key': 'isIPythonEnabled', 'type': 'bool'},
        'is_tensor_board_enabled': {'key': 'isTensorBoardEnabled', 'type': 'bool'},
        'interactive_port': {'key': 'interactivePort', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        is_ssh_enabled: Optional[bool] = None,
        ssh_public_key: Optional[str] = None,
        is_i_python_enabled: Optional[bool] = None,
        is_tensor_board_enabled: Optional[bool] = None,
        interactive_port: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword is_ssh_enabled:
        :paramtype is_ssh_enabled: bool
        :keyword ssh_public_key:
        :paramtype ssh_public_key: str
        :keyword is_i_python_enabled:
        :paramtype is_i_python_enabled: bool
        :keyword is_tensor_board_enabled:
        :paramtype is_tensor_board_enabled: bool
        :keyword interactive_port:
        :paramtype interactive_port: int
        """
        super(AetherInteractiveConfig, self).__init__(**kwargs)
        self.is_ssh_enabled = is_ssh_enabled
        self.ssh_public_key = ssh_public_key
        self.is_i_python_enabled = is_i_python_enabled
        self.is_tensor_board_enabled = is_tensor_board_enabled
        self.interactive_port = interactive_port


class AetherK8SConfiguration(msrest.serialization.Model):
    """AetherK8SConfiguration.

    :ivar max_retry_count:
    :vartype max_retry_count: int
    :ivar resource_configuration:
    :vartype resource_configuration: ~flow.models.AetherResourceConfig
    :ivar priority_configuration:
    :vartype priority_configuration: ~flow.models.AetherPriorityConfig
    :ivar interactive_configuration:
    :vartype interactive_configuration: ~flow.models.AetherInteractiveConfig
    """

    # Wire key/type mapping consumed by msrest serialization.
    _attribute_map = {
        'max_retry_count': {'key': 'maxRetryCount', 'type': 'int'},
        'resource_configuration': {'key': 'resourceConfiguration', 'type': 'AetherResourceConfig'},
        'priority_configuration': {'key': 'priorityConfiguration', 'type': 'AetherPriorityConfig'},
        'interactive_configuration': {'key': 'interactiveConfiguration', 'type': 'AetherInteractiveConfig'},
    }

    def __init__(
        self,
        *,
        max_retry_count: Optional[int] = None,
        resource_configuration: Optional["AetherResourceConfig"] = None,
        priority_configuration: Optional["AetherPriorityConfig"] = None,
        interactive_configuration: Optional["AetherInteractiveConfig"] = None,
        **kwargs
    ):
        """
        :keyword max_retry_count:
        :paramtype max_retry_count: int
        :keyword resource_configuration:
        :paramtype resource_configuration: ~flow.models.AetherResourceConfig
        :keyword priority_configuration:
        :paramtype priority_configuration: ~flow.models.AetherPriorityConfig
        :keyword interactive_configuration:
        :paramtype interactive_configuration: ~flow.models.AetherInteractiveConfig
        """
        super(AetherK8SConfiguration, self).__init__(**kwargs)
        self.max_retry_count = max_retry_count
        self.resource_configuration = resource_configuration
        self.priority_configuration = priority_configuration
        self.interactive_configuration = interactive_configuration


class AetherLegacyDataPath(msrest.serialization.Model):
    """AetherLegacyDataPath.

    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode :ivar relative_path: :vartype relative_path: str """ _attribute_map = { 'data_store_name': {'key': 'dataStoreName', 'type': 'str'}, 'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, } def __init__( self, *, data_store_name: Optional[str] = None, data_store_mode: Optional[Union[str, "AetherDataStoreMode"]] = None, relative_path: Optional[str] = None, **kwargs ): """ :keyword data_store_name: :paramtype data_store_name: str :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". :paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode :keyword relative_path: :paramtype relative_path: str """ super(AetherLegacyDataPath, self).__init__(**kwargs) self.data_store_name = data_store_name self.data_store_mode = data_store_mode self.relative_path = relative_path class AetherLimitSettings(msrest.serialization.Model): """AetherLimitSettings. 
:ivar max_trials: :vartype max_trials: int :ivar timeout: :vartype timeout: str :ivar trial_timeout: :vartype trial_timeout: str :ivar max_concurrent_trials: :vartype max_concurrent_trials: int :ivar max_cores_per_trial: :vartype max_cores_per_trial: int :ivar exit_score: :vartype exit_score: float :ivar enable_early_termination: :vartype enable_early_termination: bool :ivar max_nodes: :vartype max_nodes: int """ _attribute_map = { 'max_trials': {'key': 'maxTrials', 'type': 'int'}, 'timeout': {'key': 'timeout', 'type': 'str'}, 'trial_timeout': {'key': 'trialTimeout', 'type': 'str'}, 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, 'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'}, 'exit_score': {'key': 'exitScore', 'type': 'float'}, 'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'}, 'max_nodes': {'key': 'maxNodes', 'type': 'int'}, } def __init__( self, *, max_trials: Optional[int] = None, timeout: Optional[str] = None, trial_timeout: Optional[str] = None, max_concurrent_trials: Optional[int] = None, max_cores_per_trial: Optional[int] = None, exit_score: Optional[float] = None, enable_early_termination: Optional[bool] = None, max_nodes: Optional[int] = None, **kwargs ): """ :keyword max_trials: :paramtype max_trials: int :keyword timeout: :paramtype timeout: str :keyword trial_timeout: :paramtype trial_timeout: str :keyword max_concurrent_trials: :paramtype max_concurrent_trials: int :keyword max_cores_per_trial: :paramtype max_cores_per_trial: int :keyword exit_score: :paramtype exit_score: float :keyword enable_early_termination: :paramtype enable_early_termination: bool :keyword max_nodes: :paramtype max_nodes: int """ super(AetherLimitSettings, self).__init__(**kwargs) self.max_trials = max_trials self.timeout = timeout self.trial_timeout = trial_timeout self.max_concurrent_trials = max_concurrent_trials self.max_cores_per_trial = max_cores_per_trial self.exit_score = exit_score 
        self.enable_early_termination = enable_early_termination
        self.max_nodes = max_nodes


# AutoRest-generated msrest model: compute-type descriptor for MLC compute.
class AetherMlcComputeInfo(msrest.serialization.Model):
    """AetherMlcComputeInfo.

    :ivar mlc_compute_type:
    :vartype mlc_compute_type: str
    """

    _attribute_map = {
        'mlc_compute_type': {'key': 'mlcComputeType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        mlc_compute_type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword mlc_compute_type:
        :paramtype mlc_compute_type: str
        """
        super(AetherMlcComputeInfo, self).__init__(**kwargs)
        self.mlc_compute_type = mlc_compute_type


# Full module entity record: identity, packaging, interface, audit metadata.
# Generated model — the parameter names `id`, `hash`, and `type`-like fields
# shadow builtins by design (they mirror the REST contract) and must stay.
class AetherModuleEntity(msrest.serialization.Model):
    """AetherModuleEntity.

    :ivar last_updated_by:
    :vartype last_updated_by: ~flow.models.AetherCreatedBy
    :ivar display_name:
    :vartype display_name: str
    :ivar module_execution_type:
    :vartype module_execution_type: str
    :ivar module_type: Possible values include: "None", "BatchInferencing".
    :vartype module_type: str or ~flow.models.AetherModuleType
    :ivar module_type_version:
    :vartype module_type_version: str
    :ivar resource_requirements:
    :vartype resource_requirements: ~flow.models.AetherResourceModel
    :ivar machine_cluster:
    :vartype machine_cluster: list[str]
    :ivar default_compliance_cluster:
    :vartype default_compliance_cluster: str
    :ivar repository_type: Possible values include: "None", "Other", "Git", "SourceDepot",
     "Cosmos".
    :vartype repository_type: str or ~flow.models.AetherRepositoryType
    :ivar relative_path_to_source_code:
    :vartype relative_path_to_source_code: str
    :ivar commit_id:
    :vartype commit_id: str
    :ivar code_review_link:
    :vartype code_review_link: str
    :ivar unit_tests_available:
    :vartype unit_tests_available: bool
    :ivar is_compressed:
    :vartype is_compressed: bool
    :ivar execution_environment: Possible values include: "ExeWorkerMachine",
     "DockerContainerWithoutNetwork", "DockerContainerWithNetwork", "HyperVWithoutNetwork",
     "HyperVWithNetwork".
    :vartype execution_environment: str or ~flow.models.AetherExecutionEnvironment
    :ivar is_output_markup_enabled:
    :vartype is_output_markup_enabled: bool
    :ivar docker_image_id:
    :vartype docker_image_id: str
    :ivar docker_image_reference:
    :vartype docker_image_reference: str
    :ivar docker_image_security_groups:
    :vartype docker_image_security_groups: str
    :ivar extended_properties:
    :vartype extended_properties: ~flow.models.AetherModuleExtendedProperties
    :ivar deployment_source: Possible values include: "Client", "AutoDeployment", "Vsts".
    :vartype deployment_source: str or ~flow.models.AetherModuleDeploymentSource
    :ivar deployment_source_metadata:
    :vartype deployment_source_metadata: str
    :ivar identifier_hash:
    :vartype identifier_hash: str
    :ivar identifier_hash_v2:
    :vartype identifier_hash_v2: str
    :ivar kv_tags: This is a dictionary.
    :vartype kv_tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar created_by:
    :vartype created_by: ~flow.models.AetherCreatedBy
    :ivar runconfig:
    :vartype runconfig: str
    :ivar cloud_settings:
    :vartype cloud_settings: ~flow.models.AetherCloudSettings
    :ivar category:
    :vartype category: str
    :ivar step_type:
    :vartype step_type: str
    :ivar stage:
    :vartype stage: str
    :ivar upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed".
    :vartype upload_state: str or ~flow.models.AetherUploadState
    :ivar source_code_location:
    :vartype source_code_location: str
    :ivar size_in_bytes:
    :vartype size_in_bytes: long
    :ivar download_location:
    :vartype download_location: str
    :ivar data_location:
    :vartype data_location: ~flow.models.AetherDataLocation
    :ivar scripting_runtime_id:
    :vartype scripting_runtime_id: str
    :ivar interface_documentation:
    :vartype interface_documentation: ~flow.models.AetherEntityInterfaceDocumentation
    :ivar is_eyes_on:
    :vartype is_eyes_on: bool
    :ivar compliance_cluster:
    :vartype compliance_cluster: str
    :ivar is_deterministic:
    :vartype is_deterministic: bool
    :ivar information_url:
    :vartype information_url: str
    :ivar is_experiment_id_in_parameters:
    :vartype is_experiment_id_in_parameters: bool
    :ivar interface_string:
    :vartype interface_string: str
    :ivar default_parameters: This is a dictionary.
    :vartype default_parameters: dict[str, str]
    :ivar structured_interface:
    :vartype structured_interface: ~flow.models.AetherStructuredInterface
    :ivar family_id:
    :vartype family_id: str
    :ivar name:
    :vartype name: str
    :ivar hash:
    :vartype hash: str
    :ivar description:
    :vartype description: str
    :ivar version:
    :vartype version: str
    :ivar sequence_number_in_family:
    :vartype sequence_number_in_family: int
    :ivar owner:
    :vartype owner: str
    :ivar azure_tenant_id:
    :vartype azure_tenant_id: str
    :ivar azure_user_id:
    :vartype azure_user_id: str
    :ivar collaborators:
    :vartype collaborators: list[str]
    :ivar id:
    :vartype id: str
    :ivar workspace_id:
    :vartype workspace_id: str
    :ivar etag:
    :vartype etag: str
    :ivar tags: A set of tags.
    :vartype tags: list[str]
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.AetherEntityStatus
    """

    _attribute_map = {
        'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'AetherCreatedBy'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'module_execution_type': {'key': 'moduleExecutionType', 'type': 'str'},
        'module_type': {'key': 'moduleType', 'type': 'str'},
        'module_type_version': {'key': 'moduleTypeVersion', 'type': 'str'},
        'resource_requirements': {'key': 'resourceRequirements', 'type': 'AetherResourceModel'},
        'machine_cluster': {'key': 'machineCluster', 'type': '[str]'},
        'default_compliance_cluster': {'key': 'defaultComplianceCluster', 'type': 'str'},
        'repository_type': {'key': 'repositoryType', 'type': 'str'},
        'relative_path_to_source_code': {'key': 'relativePathToSourceCode', 'type': 'str'},
        'commit_id': {'key': 'commitId', 'type': 'str'},
        'code_review_link': {'key': 'codeReviewLink', 'type': 'str'},
        'unit_tests_available': {'key': 'unitTestsAvailable', 'type': 'bool'},
        'is_compressed': {'key': 'isCompressed', 'type': 'bool'},
        'execution_environment': {'key': 'executionEnvironment', 'type': 'str'},
        'is_output_markup_enabled': {'key': 'isOutputMarkupEnabled', 'type': 'bool'},
        'docker_image_id': {'key': 'dockerImageId', 'type': 'str'},
        'docker_image_reference': {'key': 'dockerImageReference', 'type': 'str'},
        'docker_image_security_groups': {'key': 'dockerImageSecurityGroups', 'type': 'str'},
        'extended_properties': {'key': 'extendedProperties', 'type': 'AetherModuleExtendedProperties'},
        'deployment_source': {'key': 'deploymentSource', 'type': 'str'},
        'deployment_source_metadata': {'key': 'deploymentSourceMetadata', 'type': 'str'},
        'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
        'identifier_hash_v2': {'key': 'identifierHashV2', 'type': 'str'},
        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'created_by': {'key': 'createdBy', 'type': 'AetherCreatedBy'},
        'runconfig': {'key': 'runconfig', 'type': 'str'},
        'cloud_settings': {'key': 'cloudSettings', 'type': 'AetherCloudSettings'},
        'category': {'key': 'category', 'type': 'str'},
        'step_type': {'key': 'stepType', 'type': 'str'},
        'stage': {'key': 'stage', 'type': 'str'},
        'upload_state': {'key': 'uploadState', 'type': 'str'},
        'source_code_location': {'key': 'sourceCodeLocation', 'type': 'str'},
        'size_in_bytes': {'key': 'sizeInBytes', 'type': 'long'},
        'download_location': {'key': 'downloadLocation', 'type': 'str'},
        'data_location': {'key': 'dataLocation', 'type': 'AetherDataLocation'},
        'scripting_runtime_id': {'key': 'scriptingRuntimeId', 'type': 'str'},
        'interface_documentation': {'key': 'interfaceDocumentation', 'type': 'AetherEntityInterfaceDocumentation'},
        'is_eyes_on': {'key': 'isEyesOn', 'type': 'bool'},
        'compliance_cluster': {'key': 'complianceCluster', 'type': 'str'},
        'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
        'information_url': {'key': 'informationUrl', 'type': 'str'},
        'is_experiment_id_in_parameters': {'key': 'isExperimentIdInParameters', 'type': 'bool'},
        'interface_string': {'key': 'interfaceString', 'type': 'str'},
        'default_parameters': {'key': 'defaultParameters', 'type': '{str}'},
        'structured_interface': {'key': 'structuredInterface', 'type': 'AetherStructuredInterface'},
        'family_id': {'key': 'familyId', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'hash': {'key': 'hash', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'sequence_number_in_family': {'key': 'sequenceNumberInFamily', 'type': 'int'},
        'owner': {'key': 'owner', 'type': 'str'},
        'azure_tenant_id': {'key': 'azureTenantId', 'type': 'str'},
        'azure_user_id': {'key': 'azureUserId', 'type': 'str'},
        'collaborators': {'key': 'collaborators', 'type': '[str]'},
        'id': {'key': 'id', 'type': 'str'},
        'workspace_id': {'key': 'workspaceId', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '[str]'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        last_updated_by: Optional["AetherCreatedBy"] = None,
        display_name: Optional[str] = None,
        module_execution_type: Optional[str] = None,
        module_type: Optional[Union[str, "AetherModuleType"]] = None,
        module_type_version: Optional[str] = None,
        resource_requirements: Optional["AetherResourceModel"] = None,
        machine_cluster: Optional[List[str]] = None,
        default_compliance_cluster: Optional[str] = None,
        repository_type: Optional[Union[str, "AetherRepositoryType"]] = None,
        relative_path_to_source_code: Optional[str] = None,
        commit_id: Optional[str] = None,
        code_review_link: Optional[str] = None,
        unit_tests_available: Optional[bool] = None,
        is_compressed: Optional[bool] = None,
        execution_environment: Optional[Union[str, "AetherExecutionEnvironment"]] = None,
        is_output_markup_enabled: Optional[bool] = None,
        docker_image_id: Optional[str] = None,
        docker_image_reference: Optional[str] = None,
        docker_image_security_groups: Optional[str] = None,
        extended_properties: Optional["AetherModuleExtendedProperties"] = None,
        deployment_source: Optional[Union[str, "AetherModuleDeploymentSource"]] = None,
        deployment_source_metadata: Optional[str] = None,
        identifier_hash: Optional[str] = None,
        identifier_hash_v2: Optional[str] = None,
        kv_tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        created_by: Optional["AetherCreatedBy"] = None,
        runconfig: Optional[str] = None,
        cloud_settings: Optional["AetherCloudSettings"] = None,
        category: Optional[str] = None,
        step_type: Optional[str] = None,
        stage: Optional[str] = None,
        upload_state: Optional[Union[str, "AetherUploadState"]] = None,
        source_code_location: Optional[str] = None,
        size_in_bytes: Optional[int] = None,
        download_location: Optional[str] = None,
        data_location: Optional["AetherDataLocation"] = None,
        scripting_runtime_id: Optional[str] = None,
        interface_documentation: Optional["AetherEntityInterfaceDocumentation"] = None,
        is_eyes_on: Optional[bool] = None,
        compliance_cluster: Optional[str] = None,
        is_deterministic: Optional[bool] = None,
        information_url: Optional[str] = None,
        is_experiment_id_in_parameters: Optional[bool] = None,
        interface_string: Optional[str] = None,
        default_parameters: Optional[Dict[str, str]] = None,
        structured_interface: Optional["AetherStructuredInterface"] = None,
        family_id: Optional[str] = None,
        name: Optional[str] = None,
        hash: Optional[str] = None,
        description: Optional[str] = None,
        version: Optional[str] = None,
        sequence_number_in_family: Optional[int] = None,
        owner: Optional[str] = None,
        azure_tenant_id: Optional[str] = None,
        azure_user_id: Optional[str] = None,
        collaborators: Optional[List[str]] = None,
        id: Optional[str] = None,
        workspace_id: Optional[str] = None,
        etag: Optional[str] = None,
        tags: Optional[List[str]] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        entity_status: Optional[Union[str, "AetherEntityStatus"]] = None,
        **kwargs
    ):
        """
        :keyword last_updated_by:
        :paramtype last_updated_by: ~flow.models.AetherCreatedBy
        :keyword display_name:
        :paramtype display_name: str
        :keyword module_execution_type:
        :paramtype module_execution_type: str
        :keyword module_type: Possible values include: "None", "BatchInferencing".
        :paramtype module_type: str or ~flow.models.AetherModuleType
        :keyword module_type_version:
        :paramtype module_type_version: str
        :keyword resource_requirements:
        :paramtype resource_requirements: ~flow.models.AetherResourceModel
        :keyword machine_cluster:
        :paramtype machine_cluster: list[str]
        :keyword default_compliance_cluster:
        :paramtype default_compliance_cluster: str
        :keyword repository_type: Possible values include: "None", "Other", "Git", "SourceDepot",
         "Cosmos".
        :paramtype repository_type: str or ~flow.models.AetherRepositoryType
        :keyword relative_path_to_source_code:
        :paramtype relative_path_to_source_code: str
        :keyword commit_id:
        :paramtype commit_id: str
        :keyword code_review_link:
        :paramtype code_review_link: str
        :keyword unit_tests_available:
        :paramtype unit_tests_available: bool
        :keyword is_compressed:
        :paramtype is_compressed: bool
        :keyword execution_environment: Possible values include: "ExeWorkerMachine",
         "DockerContainerWithoutNetwork", "DockerContainerWithNetwork", "HyperVWithoutNetwork",
         "HyperVWithNetwork".
        :paramtype execution_environment: str or ~flow.models.AetherExecutionEnvironment
        :keyword is_output_markup_enabled:
        :paramtype is_output_markup_enabled: bool
        :keyword docker_image_id:
        :paramtype docker_image_id: str
        :keyword docker_image_reference:
        :paramtype docker_image_reference: str
        :keyword docker_image_security_groups:
        :paramtype docker_image_security_groups: str
        :keyword extended_properties:
        :paramtype extended_properties: ~flow.models.AetherModuleExtendedProperties
        :keyword deployment_source: Possible values include: "Client", "AutoDeployment", "Vsts".
        :paramtype deployment_source: str or ~flow.models.AetherModuleDeploymentSource
        :keyword deployment_source_metadata:
        :paramtype deployment_source_metadata: str
        :keyword identifier_hash:
        :paramtype identifier_hash: str
        :keyword identifier_hash_v2:
        :paramtype identifier_hash_v2: str
        :keyword kv_tags: This is a dictionary.
        :paramtype kv_tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword created_by:
        :paramtype created_by: ~flow.models.AetherCreatedBy
        :keyword runconfig:
        :paramtype runconfig: str
        :keyword cloud_settings:
        :paramtype cloud_settings: ~flow.models.AetherCloudSettings
        :keyword category:
        :paramtype category: str
        :keyword step_type:
        :paramtype step_type: str
        :keyword stage:
        :paramtype stage: str
        :keyword upload_state: Possible values include: "Uploading", "Completed", "Canceled",
         "Failed".
        :paramtype upload_state: str or ~flow.models.AetherUploadState
        :keyword source_code_location:
        :paramtype source_code_location: str
        :keyword size_in_bytes:
        :paramtype size_in_bytes: long
        :keyword download_location:
        :paramtype download_location: str
        :keyword data_location:
        :paramtype data_location: ~flow.models.AetherDataLocation
        :keyword scripting_runtime_id:
        :paramtype scripting_runtime_id: str
        :keyword interface_documentation:
        :paramtype interface_documentation: ~flow.models.AetherEntityInterfaceDocumentation
        :keyword is_eyes_on:
        :paramtype is_eyes_on: bool
        :keyword compliance_cluster:
        :paramtype compliance_cluster: str
        :keyword is_deterministic:
        :paramtype is_deterministic: bool
        :keyword information_url:
        :paramtype information_url: str
        :keyword is_experiment_id_in_parameters:
        :paramtype is_experiment_id_in_parameters: bool
        :keyword interface_string:
        :paramtype interface_string: str
        :keyword default_parameters: This is a dictionary.
        :paramtype default_parameters: dict[str, str]
        :keyword structured_interface:
        :paramtype structured_interface: ~flow.models.AetherStructuredInterface
        :keyword family_id:
        :paramtype family_id: str
        :keyword name:
        :paramtype name: str
        :keyword hash:
        :paramtype hash: str
        :keyword description:
        :paramtype description: str
        :keyword version:
        :paramtype version: str
        :keyword sequence_number_in_family:
        :paramtype sequence_number_in_family: int
        :keyword owner:
        :paramtype owner: str
        :keyword azure_tenant_id:
        :paramtype azure_tenant_id: str
        :keyword azure_user_id:
        :paramtype azure_user_id: str
        :keyword collaborators:
        :paramtype collaborators: list[str]
        :keyword id:
        :paramtype id: str
        :keyword workspace_id:
        :paramtype workspace_id: str
        :keyword etag:
        :paramtype etag: str
        :keyword tags: A set of tags.
        :paramtype tags: list[str]
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.AetherEntityStatus
        """
        super(AetherModuleEntity, self).__init__(**kwargs)
        self.last_updated_by = last_updated_by
        self.display_name = display_name
        self.module_execution_type = module_execution_type
        self.module_type = module_type
        self.module_type_version = module_type_version
        self.resource_requirements = resource_requirements
        self.machine_cluster = machine_cluster
        self.default_compliance_cluster = default_compliance_cluster
        self.repository_type = repository_type
        self.relative_path_to_source_code = relative_path_to_source_code
        self.commit_id = commit_id
        self.code_review_link = code_review_link
        self.unit_tests_available = unit_tests_available
        self.is_compressed = is_compressed
        self.execution_environment = execution_environment
        self.is_output_markup_enabled = is_output_markup_enabled
        self.docker_image_id = docker_image_id
        self.docker_image_reference = docker_image_reference
        self.docker_image_security_groups = docker_image_security_groups
        self.extended_properties = extended_properties
        self.deployment_source = deployment_source
        self.deployment_source_metadata = deployment_source_metadata
        self.identifier_hash = identifier_hash
        self.identifier_hash_v2 = identifier_hash_v2
        self.kv_tags = kv_tags
        self.properties = properties
        self.created_by = created_by
        self.runconfig = runconfig
        self.cloud_settings = cloud_settings
        self.category = category
        self.step_type = step_type
        self.stage = stage
        self.upload_state = upload_state
        self.source_code_location = source_code_location
        self.size_in_bytes = size_in_bytes
        self.download_location = download_location
        self.data_location = data_location
        self.scripting_runtime_id = scripting_runtime_id
        self.interface_documentation = interface_documentation
        self.is_eyes_on = is_eyes_on
        self.compliance_cluster = compliance_cluster
        self.is_deterministic = is_deterministic
        self.information_url = information_url
        self.is_experiment_id_in_parameters = is_experiment_id_in_parameters
        self.interface_string = interface_string
        self.default_parameters = default_parameters
        self.structured_interface = structured_interface
        self.family_id = family_id
        self.name = name
        self.hash = hash
        self.description = description
        self.version = version
        self.sequence_number_in_family = sequence_number_in_family
        self.owner = owner
        self.azure_tenant_id = azure_tenant_id
        self.azure_user_id = azure_user_id
        self.collaborators = collaborators
        self.id = id
        self.workspace_id = workspace_id
        self.etag = etag
        self.tags = tags
        self.created_date = created_date
        self.last_modified_date = last_modified_date
        self.entity_status = entity_status


# Extended module properties: auto-deployment artifact info + approval flag.
class AetherModuleExtendedProperties(msrest.serialization.Model):
    """AetherModuleExtendedProperties.

    :ivar auto_deployed_artifact:
    :vartype auto_deployed_artifact: ~flow.models.AetherBuildArtifactInfo
    :ivar script_needs_approval:
    :vartype script_needs_approval: bool
    """

    _attribute_map = {
        'auto_deployed_artifact': {'key': 'autoDeployedArtifact', 'type': 'AetherBuildArtifactInfo'},
        'script_needs_approval': {'key': 'scriptNeedsApproval', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        auto_deployed_artifact: Optional["AetherBuildArtifactInfo"] = None,
        script_needs_approval: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword auto_deployed_artifact:
        :paramtype auto_deployed_artifact: ~flow.models.AetherBuildArtifactInfo
        :keyword script_needs_approval:
        :paramtype script_needs_approval: bool
        """
        super(AetherModuleExtendedProperties, self).__init__(**kwargs)
        self.auto_deployed_artifact = auto_deployed_artifact
        self.script_needs_approval = script_needs_approval


# N-cross-validation setting: auto mode or a custom fold count.
class AetherNCrossValidations(msrest.serialization.Model):
    """AetherNCrossValidations.

    :ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherNCrossValidationMode :ivar value: :vartype value: int """ _attribute_map = { 'mode': {'key': 'mode', 'type': 'str'}, 'value': {'key': 'value', 'type': 'int'}, } def __init__( self, *, mode: Optional[Union[str, "AetherNCrossValidationMode"]] = None, value: Optional[int] = None, **kwargs ): """ :keyword mode: Possible values include: "Auto", "Custom". :paramtype mode: str or ~flow.models.AetherNCrossValidationMode :keyword value: :paramtype value: int """ super(AetherNCrossValidations, self).__init__(**kwargs) self.mode = mode self.value = value class AetherOutputSetting(msrest.serialization.Model): """AetherOutputSetting. :ivar name: :vartype name: str :ivar data_store_name: :vartype data_store_name: str :ivar data_store_name_parameter_assignment: :vartype data_store_name_parameter_assignment: ~flow.models.AetherParameterAssignment :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". :vartype data_store_mode: str or ~flow.models.AetherDataStoreMode :ivar data_store_mode_parameter_assignment: :vartype data_store_mode_parameter_assignment: ~flow.models.AetherParameterAssignment :ivar path_on_compute: :vartype path_on_compute: str :ivar path_on_compute_parameter_assignment: :vartype path_on_compute_parameter_assignment: ~flow.models.AetherParameterAssignment :ivar overwrite: :vartype overwrite: bool :ivar data_reference_name: :vartype data_reference_name: str :ivar web_service_port: :vartype web_service_port: str :ivar dataset_registration: :vartype dataset_registration: ~flow.models.AetherDatasetRegistration :ivar dataset_output_options: :vartype dataset_output_options: ~flow.models.AetherDatasetOutputOptions :ivar asset_output_settings: :vartype asset_output_settings: ~flow.models.AetherAssetOutputSettings :ivar parameter_name: :vartype parameter_name: str :ivar asset_output_settings_parameter_name: :vartype asset_output_settings_parameter_name: str """ _attribute_map 
= { 'name': {'key': 'name', 'type': 'str'}, 'data_store_name': {'key': 'dataStoreName', 'type': 'str'}, 'data_store_name_parameter_assignment': {'key': 'DataStoreNameParameterAssignment', 'type': 'AetherParameterAssignment'}, 'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'}, 'data_store_mode_parameter_assignment': {'key': 'DataStoreModeParameterAssignment', 'type': 'AetherParameterAssignment'}, 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, 'path_on_compute_parameter_assignment': {'key': 'PathOnComputeParameterAssignment', 'type': 'AetherParameterAssignment'}, 'overwrite': {'key': 'overwrite', 'type': 'bool'}, 'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'}, 'web_service_port': {'key': 'webServicePort', 'type': 'str'}, 'dataset_registration': {'key': 'datasetRegistration', 'type': 'AetherDatasetRegistration'}, 'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'AetherDatasetOutputOptions'}, 'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AetherAssetOutputSettings'}, 'parameter_name': {'key': 'parameterName', 'type': 'str'}, 'asset_output_settings_parameter_name': {'key': 'AssetOutputSettingsParameterName', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, data_store_name: Optional[str] = None, data_store_name_parameter_assignment: Optional["AetherParameterAssignment"] = None, data_store_mode: Optional[Union[str, "AetherDataStoreMode"]] = None, data_store_mode_parameter_assignment: Optional["AetherParameterAssignment"] = None, path_on_compute: Optional[str] = None, path_on_compute_parameter_assignment: Optional["AetherParameterAssignment"] = None, overwrite: Optional[bool] = None, data_reference_name: Optional[str] = None, web_service_port: Optional[str] = None, dataset_registration: Optional["AetherDatasetRegistration"] = None, dataset_output_options: Optional["AetherDatasetOutputOptions"] = None, asset_output_settings: Optional["AetherAssetOutputSettings"] = None, 
parameter_name: Optional[str] = None, asset_output_settings_parameter_name: Optional[str] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword data_store_name: :paramtype data_store_name: str :keyword data_store_name_parameter_assignment: :paramtype data_store_name_parameter_assignment: ~flow.models.AetherParameterAssignment :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". :paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode :keyword data_store_mode_parameter_assignment: :paramtype data_store_mode_parameter_assignment: ~flow.models.AetherParameterAssignment :keyword path_on_compute: :paramtype path_on_compute: str :keyword path_on_compute_parameter_assignment: :paramtype path_on_compute_parameter_assignment: ~flow.models.AetherParameterAssignment :keyword overwrite: :paramtype overwrite: bool :keyword data_reference_name: :paramtype data_reference_name: str :keyword web_service_port: :paramtype web_service_port: str :keyword dataset_registration: :paramtype dataset_registration: ~flow.models.AetherDatasetRegistration :keyword dataset_output_options: :paramtype dataset_output_options: ~flow.models.AetherDatasetOutputOptions :keyword asset_output_settings: :paramtype asset_output_settings: ~flow.models.AetherAssetOutputSettings :keyword parameter_name: :paramtype parameter_name: str :keyword asset_output_settings_parameter_name: :paramtype asset_output_settings_parameter_name: str """ super(AetherOutputSetting, self).__init__(**kwargs) self.name = name self.data_store_name = data_store_name self.data_store_name_parameter_assignment = data_store_name_parameter_assignment self.data_store_mode = data_store_mode self.data_store_mode_parameter_assignment = data_store_mode_parameter_assignment self.path_on_compute = path_on_compute self.path_on_compute_parameter_assignment = path_on_compute_parameter_assignment self.overwrite = overwrite self.data_reference_name = 
data_reference_name
        self.web_service_port = web_service_port
        self.dataset_registration = dataset_registration
        self.dataset_output_options = dataset_output_options
        self.asset_output_settings = asset_output_settings
        self.parameter_name = parameter_name
        self.asset_output_settings_parameter_name = asset_output_settings_parameter_name


class AetherParallelForControlFlowInfo(msrest.serialization.Model):
    """AetherParallelForControlFlowInfo.

    :ivar parallel_for_items_input:
    :vartype parallel_for_items_input: ~flow.models.AetherParameterAssignment
    """

    # Attribute-name -> REST wire-key/type map consumed by msrest (de)serialization.
    _attribute_map = {
        'parallel_for_items_input': {'key': 'parallelForItemsInput', 'type': 'AetherParameterAssignment'},
    }

    def __init__(
        self,
        *,
        parallel_for_items_input: Optional["AetherParameterAssignment"] = None,
        **kwargs
    ):
        """
        :keyword parallel_for_items_input:
        :paramtype parallel_for_items_input: ~flow.models.AetherParameterAssignment
        """
        super(AetherParallelForControlFlowInfo, self).__init__(**kwargs)
        self.parallel_for_items_input = parallel_for_items_input


class AetherParameterAssignment(msrest.serialization.Model):
    """AetherParameterAssignment.

    :ivar value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
     "Input", "DataPath", "DataSetDefinition".
    :vartype value_type: str or ~flow.models.AetherParameterValueType
    :ivar assignments_to_concatenate:
    :vartype assignments_to_concatenate: list[~flow.models.AetherParameterAssignment]
    :ivar data_path_assignment:
    :vartype data_path_assignment: ~flow.models.AetherLegacyDataPath
    :ivar data_set_definition_value_assignment:
    :vartype data_set_definition_value_assignment: ~flow.models.AetherDataSetDefinitionValue
    :ivar name:
    :vartype name: str
    :ivar value:
    :vartype value: str
    """

    # 'assignments_to_concatenate' nests this same model type, so a "Concatenate"
    # assignment serializes as a recursive tree of AetherParameterAssignment.
    _attribute_map = {
        'value_type': {'key': 'valueType', 'type': 'str'},
        'assignments_to_concatenate': {'key': 'assignmentsToConcatenate', 'type': '[AetherParameterAssignment]'},
        'data_path_assignment': {'key': 'dataPathAssignment', 'type': 'AetherLegacyDataPath'},
        'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': 'AetherDataSetDefinitionValue'},
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value_type: Optional[Union[str, "AetherParameterValueType"]] = None,
        assignments_to_concatenate: Optional[List["AetherParameterAssignment"]] = None,
        data_path_assignment: Optional["AetherLegacyDataPath"] = None,
        data_set_definition_value_assignment: Optional["AetherDataSetDefinitionValue"] = None,
        name: Optional[str] = None,
        value: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
         "Input", "DataPath", "DataSetDefinition".
        :paramtype value_type: str or ~flow.models.AetherParameterValueType
        :keyword assignments_to_concatenate:
        :paramtype assignments_to_concatenate: list[~flow.models.AetherParameterAssignment]
        :keyword data_path_assignment:
        :paramtype data_path_assignment: ~flow.models.AetherLegacyDataPath
        :keyword data_set_definition_value_assignment:
        :paramtype data_set_definition_value_assignment: ~flow.models.AetherDataSetDefinitionValue
        :keyword name:
        :paramtype name: str
        :keyword value:
        :paramtype value: str
        """
        super(AetherParameterAssignment, self).__init__(**kwargs)
        self.value_type = value_type
        self.assignments_to_concatenate = assignments_to_concatenate
        self.data_path_assignment = data_path_assignment
        self.data_set_definition_value_assignment = data_set_definition_value_assignment
        self.name = name
        self.value = value


class AetherPhillyHdfsReference(msrest.serialization.Model):
    """AetherPhillyHdfsReference.

    :ivar cluster:
    :vartype cluster: str
    :ivar vc:
    :vartype vc: str
    :ivar relative_path:
    :vartype relative_path: str
    """

    _attribute_map = {
        'cluster': {'key': 'cluster', 'type': 'str'},
        'vc': {'key': 'vc', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        cluster: Optional[str] = None,
        vc: Optional[str] = None,
        relative_path: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword cluster:
        :paramtype cluster: str
        :keyword vc:
        :paramtype vc: str
        :keyword relative_path:
        :paramtype relative_path: str
        """
        super(AetherPhillyHdfsReference, self).__init__(**kwargs)
        self.cluster = cluster
        self.vc = vc
        self.relative_path = relative_path


class AetherPortInfo(msrest.serialization.Model):
    """AetherPortInfo.
    :ivar node_id:
    :vartype node_id: str
    :ivar port_name:
    :vartype port_name: str
    :ivar graph_port_name:
    :vartype graph_port_name: str
    :ivar is_parameter:
    :vartype is_parameter: bool
    :ivar web_service_port:
    :vartype web_service_port: str
    """

    # Wire keys on the REST contract are camelCase; Python attributes are snake_case.
    _attribute_map = {
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'port_name': {'key': 'portName', 'type': 'str'},
        'graph_port_name': {'key': 'graphPortName', 'type': 'str'},
        'is_parameter': {'key': 'isParameter', 'type': 'bool'},
        'web_service_port': {'key': 'webServicePort', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        node_id: Optional[str] = None,
        port_name: Optional[str] = None,
        graph_port_name: Optional[str] = None,
        is_parameter: Optional[bool] = None,
        web_service_port: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword node_id:
        :paramtype node_id: str
        :keyword port_name:
        :paramtype port_name: str
        :keyword graph_port_name:
        :paramtype graph_port_name: str
        :keyword is_parameter:
        :paramtype is_parameter: bool
        :keyword web_service_port:
        :paramtype web_service_port: str
        """
        super(AetherPortInfo, self).__init__(**kwargs)
        self.node_id = node_id
        self.port_name = port_name
        self.graph_port_name = graph_port_name
        self.is_parameter = is_parameter
        self.web_service_port = web_service_port


class AetherPriorityConfig(msrest.serialization.Model):
    """AetherPriorityConfig.

    :ivar job_priority:
    :vartype job_priority: int
    :ivar is_preemptible:
    :vartype is_preemptible: bool
    :ivar node_count_set:
    :vartype node_count_set: list[int]
    :ivar scale_interval:
    :vartype scale_interval: int
    """

    _attribute_map = {
        'job_priority': {'key': 'jobPriority', 'type': 'int'},
        'is_preemptible': {'key': 'isPreemptible', 'type': 'bool'},
        'node_count_set': {'key': 'nodeCountSet', 'type': '[int]'},
        'scale_interval': {'key': 'scaleInterval', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        job_priority: Optional[int] = None,
        is_preemptible: Optional[bool] = None,
        node_count_set: Optional[List[int]] = None,
        scale_interval: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword job_priority:
        :paramtype job_priority: int
        :keyword is_preemptible:
        :paramtype is_preemptible: bool
        :keyword node_count_set:
        :paramtype node_count_set: list[int]
        :keyword scale_interval:
        :paramtype scale_interval: int
        """
        super(AetherPriorityConfig, self).__init__(**kwargs)
        self.job_priority = job_priority
        self.is_preemptible = is_preemptible
        self.node_count_set = node_count_set
        self.scale_interval = scale_interval


class AetherPriorityConfiguration(msrest.serialization.Model):
    """AetherPriorityConfiguration.

    :ivar cloud_priority:
    :vartype cloud_priority: int
    :ivar string_type_priority:
    :vartype string_type_priority: str
    """

    _attribute_map = {
        'cloud_priority': {'key': 'cloudPriority', 'type': 'int'},
        'string_type_priority': {'key': 'stringTypePriority', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        cloud_priority: Optional[int] = None,
        string_type_priority: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword cloud_priority:
        :paramtype cloud_priority: int
        :keyword string_type_priority:
        :paramtype string_type_priority: str
        """
        super(AetherPriorityConfiguration, self).__init__(**kwargs)
        self.cloud_priority = cloud_priority
        self.string_type_priority = string_type_priority


class AetherRegisteredDataSetReference(msrest.serialization.Model):
    """AetherRegisteredDataSetReference.
    :ivar id:
    :vartype id: str
    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # noqa: shadows builtin 'id'; name fixed by the generated REST contract
        name: Optional[str] = None,
        version: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        """
        super(AetherRegisteredDataSetReference, self).__init__(**kwargs)
        self.id = id
        self.name = name
        self.version = version


class AetherRemoteDockerComputeInfo(msrest.serialization.Model):
    """AetherRemoteDockerComputeInfo.

    :ivar address:
    :vartype address: str
    :ivar username:
    :vartype username: str
    :ivar password:
    :vartype password: str
    :ivar private_key:
    :vartype private_key: str
    """

    # NOTE(review): password/private_key are carried as plain strings on this model;
    # treat instances as sensitive and avoid logging them.
    _attribute_map = {
        'address': {'key': 'address', 'type': 'str'},
        'username': {'key': 'username', 'type': 'str'},
        'password': {'key': 'password', 'type': 'str'},
        'private_key': {'key': 'privateKey', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        address: Optional[str] = None,
        username: Optional[str] = None,
        password: Optional[str] = None,
        private_key: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword address:
        :paramtype address: str
        :keyword username:
        :paramtype username: str
        :keyword password:
        :paramtype password: str
        :keyword private_key:
        :paramtype private_key: str
        """
        super(AetherRemoteDockerComputeInfo, self).__init__(**kwargs)
        self.address = address
        self.username = username
        self.password = password
        self.private_key = private_key


class AetherResourceAssignment(msrest.serialization.Model):
    """AetherResourceAssignment.

    :ivar attributes: Dictionary of :code:`<AetherResourceAttributeAssignment>`.
    :vartype attributes: dict[str, ~flow.models.AetherResourceAttributeAssignment]
    """

    # '{AetherResourceAttributeAssignment}' => dict of str -> model on the wire.
    _attribute_map = {
        'attributes': {'key': 'attributes', 'type': '{AetherResourceAttributeAssignment}'},
    }

    def __init__(
        self,
        *,
        attributes: Optional[Dict[str, "AetherResourceAttributeAssignment"]] = None,
        **kwargs
    ):
        """
        :keyword attributes: Dictionary of :code:`<AetherResourceAttributeAssignment>`.
        :paramtype attributes: dict[str, ~flow.models.AetherResourceAttributeAssignment]
        """
        super(AetherResourceAssignment, self).__init__(**kwargs)
        self.attributes = attributes


class AetherResourceAttributeAssignment(msrest.serialization.Model):
    """AetherResourceAttributeAssignment.

    :ivar attribute:
    :vartype attribute: ~flow.models.AetherResourceAttributeDefinition
    :ivar operator: Possible values include: "Equal", "Contain", "GreaterOrEqual".
    :vartype operator: str or ~flow.models.AetherResourceOperator
    :ivar value:
    :vartype value: str
    """

    _attribute_map = {
        'attribute': {'key': 'attribute', 'type': 'AetherResourceAttributeDefinition'},
        'operator': {'key': 'operator', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        attribute: Optional["AetherResourceAttributeDefinition"] = None,
        operator: Optional[Union[str, "AetherResourceOperator"]] = None,
        value: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword attribute:
        :paramtype attribute: ~flow.models.AetherResourceAttributeDefinition
        :keyword operator: Possible values include: "Equal", "Contain", "GreaterOrEqual".
        :paramtype operator: str or ~flow.models.AetherResourceOperator
        :keyword value:
        :paramtype value: str
        """
        super(AetherResourceAttributeAssignment, self).__init__(**kwargs)
        self.attribute = attribute
        self.operator = operator
        self.value = value


class AetherResourceAttributeDefinition(msrest.serialization.Model):
    """AetherResourceAttributeDefinition.

    :ivar name:
    :vartype name: str
    :ivar type: Possible values include: "String", "Double".
    :vartype type: str or ~flow.models.AetherResourceValueType
    :ivar units:
    :vartype units: str
    :ivar allowed_operators:
    :vartype allowed_operators: list[str or ~flow.models.AetherResourceOperator]
    """

    # 'unique': True declares that allowed_operators entries must be distinct
    # (enforced by msrest model validation on serialization).
    _validation = {
        'allowed_operators': {'unique': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'units': {'key': 'units', 'type': 'str'},
        'allowed_operators': {'key': 'allowedOperators', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        type: Optional[Union[str, "AetherResourceValueType"]] = None,
        units: Optional[str] = None,
        allowed_operators: Optional[List[Union[str, "AetherResourceOperator"]]] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword type: Possible values include: "String", "Double".
        :paramtype type: str or ~flow.models.AetherResourceValueType
        :keyword units:
        :paramtype units: str
        :keyword allowed_operators:
        :paramtype allowed_operators: list[str or ~flow.models.AetherResourceOperator]
        """
        super(AetherResourceAttributeDefinition, self).__init__(**kwargs)
        self.name = name
        self.type = type
        self.units = units
        self.allowed_operators = allowed_operators


class AetherResourceConfig(msrest.serialization.Model):
    """AetherResourceConfig.

    :ivar gpu_count:
    :vartype gpu_count: int
    :ivar cpu_count:
    :vartype cpu_count: int
    :ivar memory_request_in_gb:
    :vartype memory_request_in_gb: int
    """

    _attribute_map = {
        'gpu_count': {'key': 'gpuCount', 'type': 'int'},
        'cpu_count': {'key': 'cpuCount', 'type': 'int'},
        'memory_request_in_gb': {'key': 'memoryRequestInGB', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        gpu_count: Optional[int] = None,
        cpu_count: Optional[int] = None,
        memory_request_in_gb: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword gpu_count:
        :paramtype gpu_count: int
        :keyword cpu_count:
        :paramtype cpu_count: int
        :keyword memory_request_in_gb:
        :paramtype memory_request_in_gb: int
        """
        super(AetherResourceConfig, self).__init__(**kwargs)
        self.gpu_count = gpu_count
        self.cpu_count = cpu_count
        self.memory_request_in_gb = memory_request_in_gb


class AetherResourceConfiguration(msrest.serialization.Model):
    """AetherResourceConfiguration.

    :ivar instance_count:
    :vartype instance_count: int
    :ivar instance_type:
    :vartype instance_type: str
    :ivar properties: Dictionary of :code:`<any>`.
    :vartype properties: dict[str, any]
    :ivar locations:
    :vartype locations: list[str]
    :ivar instance_priority:
    :vartype instance_priority: str
    :ivar quota_enforcement_resource_id:
    :vartype quota_enforcement_resource_id: str
    """

    _attribute_map = {
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        # '{object}' => values are passed through untyped (dict[str, any]).
        'properties': {'key': 'properties', 'type': '{object}'},
        'locations': {'key': 'locations', 'type': '[str]'},
        'instance_priority': {'key': 'instancePriority', 'type': 'str'},
        'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        instance_count: Optional[int] = None,
        instance_type: Optional[str] = None,
        properties: Optional[Dict[str, Any]] = None,
        locations: Optional[List[str]] = None,
        instance_priority: Optional[str] = None,
        quota_enforcement_resource_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword instance_count:
        :paramtype instance_count: int
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword properties: Dictionary of :code:`<any>`.
        :paramtype properties: dict[str, any]
        :keyword locations:
        :paramtype locations: list[str]
        :keyword instance_priority:
        :paramtype instance_priority: str
        :keyword quota_enforcement_resource_id:
        :paramtype quota_enforcement_resource_id: str
        """
        super(AetherResourceConfiguration, self).__init__(**kwargs)
        self.instance_count = instance_count
        self.instance_type = instance_type
        self.properties = properties
        self.locations = locations
        self.instance_priority = instance_priority
        self.quota_enforcement_resource_id = quota_enforcement_resource_id


class AetherResourceModel(msrest.serialization.Model):
    """AetherResourceModel.
    :ivar resources:
    :vartype resources: list[~flow.models.AetherResourceAssignment]
    """

    _attribute_map = {
        'resources': {'key': 'resources', 'type': '[AetherResourceAssignment]'},
    }

    def __init__(
        self,
        *,
        resources: Optional[List["AetherResourceAssignment"]] = None,
        **kwargs
    ):
        """
        :keyword resources:
        :paramtype resources: list[~flow.models.AetherResourceAssignment]
        """
        super(AetherResourceModel, self).__init__(**kwargs)
        self.resources = resources


class AetherResourcesSetting(msrest.serialization.Model):
    """AetherResourcesSetting.

    :ivar instance_size:
    :vartype instance_size: str
    :ivar spark_version:
    :vartype spark_version: str
    """

    _attribute_map = {
        'instance_size': {'key': 'instanceSize', 'type': 'str'},
        'spark_version': {'key': 'sparkVersion', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        instance_size: Optional[str] = None,
        spark_version: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword instance_size:
        :paramtype instance_size: str
        :keyword spark_version:
        :paramtype spark_version: str
        """
        super(AetherResourcesSetting, self).__init__(**kwargs)
        self.instance_size = instance_size
        self.spark_version = spark_version


class AetherSavedDataSetReference(msrest.serialization.Model):
    """AetherSavedDataSetReference.

    :ivar id:
    :vartype id: str
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        """
        super(AetherSavedDataSetReference, self).__init__(**kwargs)
        self.id = id


class AetherScopeCloudConfiguration(msrest.serialization.Model):
    """AetherScopeCloudConfiguration.

    :ivar input_path_suffixes: This is a dictionary.
    :vartype input_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
    :ivar output_path_suffixes: This is a dictionary.
    :vartype output_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
    :ivar user_alias:
    :vartype user_alias: str
    :ivar tokens:
    :vartype tokens: int
    :ivar auto_token:
    :vartype auto_token: int
    :ivar vcp:
    :vartype vcp: float
    """

    # '{AetherArgumentAssignment}' => dict of str -> AetherArgumentAssignment on the wire.
    _attribute_map = {
        'input_path_suffixes': {'key': 'inputPathSuffixes', 'type': '{AetherArgumentAssignment}'},
        'output_path_suffixes': {'key': 'outputPathSuffixes', 'type': '{AetherArgumentAssignment}'},
        'user_alias': {'key': 'userAlias', 'type': 'str'},
        'tokens': {'key': 'tokens', 'type': 'int'},
        'auto_token': {'key': 'autoToken', 'type': 'int'},
        'vcp': {'key': 'vcp', 'type': 'float'},
    }

    def __init__(
        self,
        *,
        input_path_suffixes: Optional[Dict[str, "AetherArgumentAssignment"]] = None,
        output_path_suffixes: Optional[Dict[str, "AetherArgumentAssignment"]] = None,
        user_alias: Optional[str] = None,
        tokens: Optional[int] = None,
        auto_token: Optional[int] = None,
        vcp: Optional[float] = None,
        **kwargs
    ):
        """
        :keyword input_path_suffixes: This is a dictionary.
        :paramtype input_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
        :keyword output_path_suffixes: This is a dictionary.
        :paramtype output_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
        :keyword user_alias:
        :paramtype user_alias: str
        :keyword tokens:
        :paramtype tokens: int
        :keyword auto_token:
        :paramtype auto_token: int
        :keyword vcp:
        :paramtype vcp: float
        """
        super(AetherScopeCloudConfiguration, self).__init__(**kwargs)
        self.input_path_suffixes = input_path_suffixes
        self.output_path_suffixes = output_path_suffixes
        self.user_alias = user_alias
        self.tokens = tokens
        self.auto_token = auto_token
        self.vcp = vcp


class AetherSeasonality(msrest.serialization.Model):
    """AetherSeasonality.

    :ivar mode: Possible values include: "Auto", "Custom".
    :vartype mode: str or ~flow.models.AetherSeasonalityMode
    :ivar value:
    :vartype value: int
    """

    _attribute_map = {
        'mode': {'key': 'mode', 'type': 'str'},
        'value': {'key': 'value', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        mode: Optional[Union[str, "AetherSeasonalityMode"]] = None,
        value: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword mode: Possible values include: "Auto", "Custom".
        :paramtype mode: str or ~flow.models.AetherSeasonalityMode
        :keyword value:
        :paramtype value: int
        """
        super(AetherSeasonality, self).__init__(**kwargs)
        self.mode = mode
        self.value = value


class AetherSqlDataPath(msrest.serialization.Model):
    """AetherSqlDataPath.

    :ivar sql_table_name:
    :vartype sql_table_name: str
    :ivar sql_query:
    :vartype sql_query: str
    :ivar sql_stored_procedure_name:
    :vartype sql_stored_procedure_name: str
    :ivar sql_stored_procedure_params:
    :vartype sql_stored_procedure_params: list[~flow.models.AetherStoredProcedureParameter]
    """

    _attribute_map = {
        'sql_table_name': {'key': 'sqlTableName', 'type': 'str'},
        'sql_query': {'key': 'sqlQuery', 'type': 'str'},
        'sql_stored_procedure_name': {'key': 'sqlStoredProcedureName', 'type': 'str'},
        'sql_stored_procedure_params': {'key': 'sqlStoredProcedureParams', 'type': '[AetherStoredProcedureParameter]'},
    }

    def __init__(
        self,
        *,
        sql_table_name: Optional[str] = None,
        sql_query: Optional[str] = None,
        sql_stored_procedure_name: Optional[str] = None,
        sql_stored_procedure_params: Optional[List["AetherStoredProcedureParameter"]] = None,
        **kwargs
    ):
        """
        :keyword sql_table_name:
        :paramtype sql_table_name: str
        :keyword sql_query:
        :paramtype sql_query: str
        :keyword sql_stored_procedure_name:
        :paramtype sql_stored_procedure_name: str
        :keyword sql_stored_procedure_params:
        :paramtype sql_stored_procedure_params: list[~flow.models.AetherStoredProcedureParameter]
        """
        super(AetherSqlDataPath, self).__init__(**kwargs)
        self.sql_table_name = sql_table_name
        self.sql_query = sql_query
        self.sql_stored_procedure_name = sql_stored_procedure_name
        self.sql_stored_procedure_params = sql_stored_procedure_params


class AetherStackEnsembleSettings(msrest.serialization.Model):
    """AetherStackEnsembleSettings.

    :ivar stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
     "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
     "LightGBMRegressor", "LinearRegression".
    :vartype stack_meta_learner_type: str or ~flow.models.AetherStackMetaLearnerType
    :ivar stack_meta_learner_train_percentage:
    :vartype stack_meta_learner_train_percentage: float
    :ivar stack_meta_learner_k_wargs: Anything.
    :vartype stack_meta_learner_k_wargs: any
    """

    _attribute_map = {
        'stack_meta_learner_type': {'key': 'stackMetaLearnerType', 'type': 'str'},
        'stack_meta_learner_train_percentage': {'key': 'stackMetaLearnerTrainPercentage', 'type': 'float'},
        # 'object' => untyped passthrough; wire key casing 'KWargs' comes from the contract.
        'stack_meta_learner_k_wargs': {'key': 'stackMetaLearnerKWargs', 'type': 'object'},
    }

    def __init__(
        self,
        *,
        stack_meta_learner_type: Optional[Union[str, "AetherStackMetaLearnerType"]] = None,
        stack_meta_learner_train_percentage: Optional[float] = None,
        stack_meta_learner_k_wargs: Optional[Any] = None,
        **kwargs
    ):
        """
        :keyword stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
         "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
         "LightGBMRegressor", "LinearRegression".
        :paramtype stack_meta_learner_type: str or ~flow.models.AetherStackMetaLearnerType
        :keyword stack_meta_learner_train_percentage:
        :paramtype stack_meta_learner_train_percentage: float
        :keyword stack_meta_learner_k_wargs: Anything.
        :paramtype stack_meta_learner_k_wargs: any
        """
        super(AetherStackEnsembleSettings, self).__init__(**kwargs)
        self.stack_meta_learner_type = stack_meta_learner_type
        self.stack_meta_learner_train_percentage = stack_meta_learner_train_percentage
        self.stack_meta_learner_k_wargs = stack_meta_learner_k_wargs


class AetherStoredProcedureParameter(msrest.serialization.Model):
    """AetherStoredProcedureParameter.

    :ivar name:
    :vartype name: str
    :ivar value:
    :vartype value: str
    :ivar type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
    :vartype type: str or ~flow.models.AetherStoredProcedureParameterType
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        value: Optional[str] = None,
        type: Optional[Union[str, "AetherStoredProcedureParameterType"]] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword value:
        :paramtype value: str
        :keyword type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean",
         "Date".
        :paramtype type: str or ~flow.models.AetherStoredProcedureParameterType
        """
        super(AetherStoredProcedureParameter, self).__init__(**kwargs)
        self.name = name
        self.value = value
        self.type = type


class AetherStructuredInterface(msrest.serialization.Model):
    """AetherStructuredInterface.

    :ivar command_line_pattern:
    :vartype command_line_pattern: str
    :ivar inputs:
    :vartype inputs: list[~flow.models.AetherStructuredInterfaceInput]
    :ivar outputs:
    :vartype outputs: list[~flow.models.AetherStructuredInterfaceOutput]
    :ivar control_outputs:
    :vartype control_outputs: list[~flow.models.AetherControlOutput]
    :ivar parameters:
    :vartype parameters: list[~flow.models.AetherStructuredInterfaceParameter]
    :ivar metadata_parameters:
    :vartype metadata_parameters: list[~flow.models.AetherStructuredInterfaceParameter]
    :ivar arguments:
    :vartype arguments: list[~flow.models.AetherArgumentAssignment]
    """

    _attribute_map = {
        'command_line_pattern': {'key': 'commandLinePattern', 'type': 'str'},
        'inputs': {'key': 'inputs', 'type': '[AetherStructuredInterfaceInput]'},
        'outputs': {'key': 'outputs', 'type': '[AetherStructuredInterfaceOutput]'},
        'control_outputs': {'key': 'controlOutputs', 'type': '[AetherControlOutput]'},
        'parameters': {'key': 'parameters', 'type': '[AetherStructuredInterfaceParameter]'},
        'metadata_parameters': {'key': 'metadataParameters', 'type': '[AetherStructuredInterfaceParameter]'},
        'arguments': {'key': 'arguments', 'type': '[AetherArgumentAssignment]'},
    }

    def __init__(
        self,
        *,
        command_line_pattern: Optional[str] = None,
        inputs: Optional[List["AetherStructuredInterfaceInput"]] = None,
        outputs: Optional[List["AetherStructuredInterfaceOutput"]] = None,
        control_outputs: Optional[List["AetherControlOutput"]] = None,
        parameters: Optional[List["AetherStructuredInterfaceParameter"]] = None,
        metadata_parameters: Optional[List["AetherStructuredInterfaceParameter"]] = None,
        arguments: Optional[List["AetherArgumentAssignment"]] = None,
        **kwargs
    ):
        """
        :keyword command_line_pattern:
        :paramtype command_line_pattern: str
        :keyword inputs:
        :paramtype inputs: list[~flow.models.AetherStructuredInterfaceInput]
        :keyword outputs:
        :paramtype outputs: list[~flow.models.AetherStructuredInterfaceOutput]
        :keyword control_outputs:
        :paramtype control_outputs: list[~flow.models.AetherControlOutput]
        :keyword parameters:
        :paramtype parameters: list[~flow.models.AetherStructuredInterfaceParameter]
        :keyword metadata_parameters:
        :paramtype metadata_parameters: list[~flow.models.AetherStructuredInterfaceParameter]
        :keyword arguments:
        :paramtype arguments: list[~flow.models.AetherArgumentAssignment]
        """
        super(AetherStructuredInterface, self).__init__(**kwargs)
        self.command_line_pattern = command_line_pattern
        self.inputs = inputs
        self.outputs = outputs
        self.control_outputs = control_outputs
        self.parameters = parameters
        self.metadata_parameters = metadata_parameters
        self.arguments = arguments


class AetherStructuredInterfaceInput(msrest.serialization.Model):
    """AetherStructuredInterfaceInput.
    :ivar name:
    :vartype name: str
    :ivar label:
    :vartype label: str
    :ivar data_type_ids_list:
    :vartype data_type_ids_list: list[str]
    :ivar is_optional:
    :vartype is_optional: bool
    :ivar description:
    :vartype description: str
    :ivar skip_processing:
    :vartype skip_processing: bool
    :ivar is_resource:
    :vartype is_resource: bool
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar overwrite:
    :vartype overwrite: bool
    :ivar data_reference_name:
    :vartype data_reference_name: str
    :ivar dataset_types:
    :vartype dataset_types: list[str or ~flow.models.AetherDatasetType]
    :ivar additional_transformations:
    :vartype additional_transformations: str
    """

    # 'unique': True declares that dataset_types entries must be distinct
    # (enforced by msrest model validation).
    _validation = {
        'dataset_types': {'unique': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'data_type_ids_list': {'key': 'dataTypeIdsList', 'type': '[str]'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
        'description': {'key': 'description', 'type': 'str'},
        'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
        'is_resource': {'key': 'isResource', 'type': 'bool'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'overwrite': {'key': 'overwrite', 'type': 'bool'},
        'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
        'dataset_types': {'key': 'datasetTypes', 'type': '[str]'},
        'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        label: Optional[str] = None,
        data_type_ids_list: Optional[List[str]] = None,
        is_optional: Optional[bool] = None,
        description: Optional[str] = None,
        skip_processing: Optional[bool] = None,
        is_resource: Optional[bool] = None,
        data_store_mode: Optional[Union[str, "AetherDataStoreMode"]] = None,
        path_on_compute: Optional[str] = None,
        overwrite: Optional[bool] = None,
        data_reference_name: Optional[str] = None,
        dataset_types: Optional[List[Union[str, "AetherDatasetType"]]] = None,
        additional_transformations: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword label:
        :paramtype label: str
        :keyword data_type_ids_list:
        :paramtype data_type_ids_list: list[str]
        :keyword is_optional:
        :paramtype is_optional: bool
        :keyword description:
        :paramtype description: str
        :keyword skip_processing:
        :paramtype skip_processing: bool
        :keyword is_resource:
        :paramtype is_resource: bool
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword overwrite:
        :paramtype overwrite: bool
        :keyword data_reference_name:
        :paramtype data_reference_name: str
        :keyword dataset_types:
        :paramtype dataset_types: list[str or ~flow.models.AetherDatasetType]
        :keyword additional_transformations:
        :paramtype additional_transformations: str
        """
        super(AetherStructuredInterfaceInput, self).__init__(**kwargs)
        self.name = name
        self.label = label
        self.data_type_ids_list = data_type_ids_list
        self.is_optional = is_optional
        self.description = description
        self.skip_processing = skip_processing
        self.is_resource = is_resource
        self.data_store_mode = data_store_mode
        self.path_on_compute = path_on_compute
        self.overwrite = overwrite
        self.data_reference_name = data_reference_name
        self.dataset_types = dataset_types
        self.additional_transformations = additional_transformations


class AetherStructuredInterfaceOutput(msrest.serialization.Model):
    """AetherStructuredInterfaceOutput.
    :ivar name:
    :vartype name: str
    :ivar label:
    :vartype label: str
    :ivar data_type_id:
    :vartype data_type_id: str
    :ivar pass_through_data_type_input_name:
    :vartype pass_through_data_type_input_name: str
    :ivar description:
    :vartype description: str
    :ivar skip_processing:
    :vartype skip_processing: bool
    :ivar is_artifact:
    :vartype is_artifact: bool
    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar overwrite:
    :vartype overwrite: bool
    :ivar data_reference_name:
    :vartype data_reference_name: str
    :ivar training_output:
    :vartype training_output: ~flow.models.AetherTrainingOutput
    :ivar dataset_output:
    :vartype dataset_output: ~flow.models.AetherDatasetOutput
    :ivar asset_output_settings:
    :vartype asset_output_settings: ~flow.models.AetherAssetOutputSettings
    :ivar early_available:
    :vartype early_available: bool
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
        'pass_through_data_type_input_name': {'key': 'passThroughDataTypeInputName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
        'is_artifact': {'key': 'isArtifact', 'type': 'bool'},
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'overwrite': {'key': 'overwrite', 'type': 'bool'},
        'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
        'training_output': {'key': 'trainingOutput', 'type': 'AetherTrainingOutput'},
        'dataset_output': {'key': 'datasetOutput', 'type': 'AetherDatasetOutput'},
        # NOTE(review): wire key 'AssetOutputSettings' is PascalCase unlike the other
        # camelCase keys; presumably mirrors the service contract — do not normalize.
        'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AetherAssetOutputSettings'},
        'early_available': {'key': 'earlyAvailable', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        label: Optional[str] = None,
        data_type_id: Optional[str] = None,
        pass_through_data_type_input_name: Optional[str] = None,
        description: Optional[str] = None,
        skip_processing: Optional[bool] = None,
        is_artifact: Optional[bool] = None,
        data_store_name: Optional[str] = None,
        data_store_mode: Optional[Union[str, "AetherDataStoreMode"]] = None,
        path_on_compute: Optional[str] = None,
        overwrite: Optional[bool] = None,
        data_reference_name: Optional[str] = None,
        training_output: Optional["AetherTrainingOutput"] = None,
        dataset_output: Optional["AetherDatasetOutput"] = None,
        asset_output_settings: Optional["AetherAssetOutputSettings"] = None,
        early_available: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword label:
        :paramtype label: str
        :keyword data_type_id:
        :paramtype data_type_id: str
        :keyword pass_through_data_type_input_name:
        :paramtype pass_through_data_type_input_name: str
        :keyword description:
        :paramtype description: str
        :keyword skip_processing:
        :paramtype skip_processing: bool
        :keyword is_artifact:
        :paramtype is_artifact: bool
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword overwrite:
        :paramtype overwrite: bool
        :keyword data_reference_name:
        :paramtype data_reference_name: str
        :keyword training_output:
        :paramtype training_output: ~flow.models.AetherTrainingOutput
        :keyword dataset_output:
        :paramtype dataset_output: ~flow.models.AetherDatasetOutput
        :keyword asset_output_settings:
        :paramtype asset_output_settings: ~flow.models.AetherAssetOutputSettings
        :keyword early_available:
        :paramtype early_available: bool
        """
        super(AetherStructuredInterfaceOutput, self).__init__(**kwargs)
        self.name = name
        self.label = label
        self.data_type_id = data_type_id
        self.pass_through_data_type_input_name = pass_through_data_type_input_name
        self.description = description
        self.skip_processing = skip_processing
        self.is_artifact = is_artifact
        self.data_store_name = data_store_name
        self.data_store_mode = data_store_mode
        self.path_on_compute = path_on_compute
        self.overwrite = overwrite
        self.data_reference_name = data_reference_name
        self.training_output = training_output
        self.dataset_output = dataset_output
        self.asset_output_settings = asset_output_settings
        self.early_available = early_available


class AetherStructuredInterfaceParameter(msrest.serialization.Model):
    """AetherStructuredInterfaceParameter.

    :ivar name:
    :vartype name: str
    :ivar label:
    :vartype label: str
    :ivar parameter_type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
    :vartype parameter_type: str or ~flow.models.AetherParameterType
    :ivar is_optional:
    :vartype is_optional: bool
    :ivar default_value:
    :vartype default_value: str
    :ivar lower_bound:
    :vartype lower_bound: str
    :ivar upper_bound:
    :vartype upper_bound: str
    :ivar enum_values:
    :vartype enum_values: list[str]
    :ivar enum_values_to_argument_strings: This is a dictionary.
:vartype enum_values_to_argument_strings: dict[str, str] :ivar description: :vartype description: str :ivar set_environment_variable: :vartype set_environment_variable: bool :ivar environment_variable_override: :vartype environment_variable_override: str :ivar enabled_by_parameter_name: :vartype enabled_by_parameter_name: str :ivar enabled_by_parameter_values: :vartype enabled_by_parameter_values: list[str] :ivar ui_hint: :vartype ui_hint: ~flow.models.AetherUIParameterHint :ivar group_names: :vartype group_names: list[str] :ivar argument_name: :vartype argument_name: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'label': {'key': 'label', 'type': 'str'}, 'parameter_type': {'key': 'parameterType', 'type': 'str'}, 'is_optional': {'key': 'isOptional', 'type': 'bool'}, 'default_value': {'key': 'defaultValue', 'type': 'str'}, 'lower_bound': {'key': 'lowerBound', 'type': 'str'}, 'upper_bound': {'key': 'upperBound', 'type': 'str'}, 'enum_values': {'key': 'enumValues', 'type': '[str]'}, 'enum_values_to_argument_strings': {'key': 'enumValuesToArgumentStrings', 'type': '{str}'}, 'description': {'key': 'description', 'type': 'str'}, 'set_environment_variable': {'key': 'setEnvironmentVariable', 'type': 'bool'}, 'environment_variable_override': {'key': 'environmentVariableOverride', 'type': 'str'}, 'enabled_by_parameter_name': {'key': 'enabledByParameterName', 'type': 'str'}, 'enabled_by_parameter_values': {'key': 'enabledByParameterValues', 'type': '[str]'}, 'ui_hint': {'key': 'uiHint', 'type': 'AetherUIParameterHint'}, 'group_names': {'key': 'groupNames', 'type': '[str]'}, 'argument_name': {'key': 'argumentName', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, label: Optional[str] = None, parameter_type: Optional[Union[str, "AetherParameterType"]] = None, is_optional: Optional[bool] = None, default_value: Optional[str] = None, lower_bound: Optional[str] = None, upper_bound: Optional[str] = None, enum_values: Optional[List[str]] = 
None, enum_values_to_argument_strings: Optional[Dict[str, str]] = None, description: Optional[str] = None, set_environment_variable: Optional[bool] = None, environment_variable_override: Optional[str] = None, enabled_by_parameter_name: Optional[str] = None, enabled_by_parameter_values: Optional[List[str]] = None, ui_hint: Optional["AetherUIParameterHint"] = None, group_names: Optional[List[str]] = None, argument_name: Optional[str] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword label: :paramtype label: str :keyword parameter_type: Possible values include: "Int", "Double", "Bool", "String", "Undefined". :paramtype parameter_type: str or ~flow.models.AetherParameterType :keyword is_optional: :paramtype is_optional: bool :keyword default_value: :paramtype default_value: str :keyword lower_bound: :paramtype lower_bound: str :keyword upper_bound: :paramtype upper_bound: str :keyword enum_values: :paramtype enum_values: list[str] :keyword enum_values_to_argument_strings: This is a dictionary. 
:paramtype enum_values_to_argument_strings: dict[str, str] :keyword description: :paramtype description: str :keyword set_environment_variable: :paramtype set_environment_variable: bool :keyword environment_variable_override: :paramtype environment_variable_override: str :keyword enabled_by_parameter_name: :paramtype enabled_by_parameter_name: str :keyword enabled_by_parameter_values: :paramtype enabled_by_parameter_values: list[str] :keyword ui_hint: :paramtype ui_hint: ~flow.models.AetherUIParameterHint :keyword group_names: :paramtype group_names: list[str] :keyword argument_name: :paramtype argument_name: str """ super(AetherStructuredInterfaceParameter, self).__init__(**kwargs) self.name = name self.label = label self.parameter_type = parameter_type self.is_optional = is_optional self.default_value = default_value self.lower_bound = lower_bound self.upper_bound = upper_bound self.enum_values = enum_values self.enum_values_to_argument_strings = enum_values_to_argument_strings self.description = description self.set_environment_variable = set_environment_variable self.environment_variable_override = environment_variable_override self.enabled_by_parameter_name = enabled_by_parameter_name self.enabled_by_parameter_values = enabled_by_parameter_values self.ui_hint = ui_hint self.group_names = group_names self.argument_name = argument_name class AetherSubGraphConfiguration(msrest.serialization.Model): """AetherSubGraphConfiguration. 
:ivar graph_id: :vartype graph_id: str :ivar graph_draft_id: :vartype graph_draft_id: str :ivar default_compute_internal: :vartype default_compute_internal: ~flow.models.AetherComputeSetting :ivar default_datastore_internal: :vartype default_datastore_internal: ~flow.models.AetherDatastoreSetting :ivar default_cloud_priority: :vartype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting :ivar user_alias: :vartype user_alias: str :ivar is_dynamic: :vartype is_dynamic: bool """ _attribute_map = { 'graph_id': {'key': 'graphId', 'type': 'str'}, 'graph_draft_id': {'key': 'graphDraftId', 'type': 'str'}, 'default_compute_internal': {'key': 'defaultComputeInternal', 'type': 'AetherComputeSetting'}, 'default_datastore_internal': {'key': 'defaultDatastoreInternal', 'type': 'AetherDatastoreSetting'}, 'default_cloud_priority': {'key': 'DefaultCloudPriority', 'type': 'AetherCloudPrioritySetting'}, 'user_alias': {'key': 'UserAlias', 'type': 'str'}, 'is_dynamic': {'key': 'IsDynamic', 'type': 'bool'}, } def __init__( self, *, graph_id: Optional[str] = None, graph_draft_id: Optional[str] = None, default_compute_internal: Optional["AetherComputeSetting"] = None, default_datastore_internal: Optional["AetherDatastoreSetting"] = None, default_cloud_priority: Optional["AetherCloudPrioritySetting"] = None, user_alias: Optional[str] = None, is_dynamic: Optional[bool] = False, **kwargs ): """ :keyword graph_id: :paramtype graph_id: str :keyword graph_draft_id: :paramtype graph_draft_id: str :keyword default_compute_internal: :paramtype default_compute_internal: ~flow.models.AetherComputeSetting :keyword default_datastore_internal: :paramtype default_datastore_internal: ~flow.models.AetherDatastoreSetting :keyword default_cloud_priority: :paramtype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting :keyword user_alias: :paramtype user_alias: str :keyword is_dynamic: :paramtype is_dynamic: bool """ super(AetherSubGraphConfiguration, self).__init__(**kwargs) 
self.graph_id = graph_id self.graph_draft_id = graph_draft_id self.default_compute_internal = default_compute_internal self.default_datastore_internal = default_datastore_internal self.default_cloud_priority = default_cloud_priority self.user_alias = user_alias self.is_dynamic = is_dynamic class AetherSweepEarlyTerminationPolicy(msrest.serialization.Model): """AetherSweepEarlyTerminationPolicy. :ivar policy_type: Possible values include: "Bandit", "MedianStopping", "TruncationSelection". :vartype policy_type: str or ~flow.models.AetherEarlyTerminationPolicyType :ivar evaluation_interval: :vartype evaluation_interval: int :ivar delay_evaluation: :vartype delay_evaluation: int :ivar slack_factor: :vartype slack_factor: float :ivar slack_amount: :vartype slack_amount: float :ivar truncation_percentage: :vartype truncation_percentage: int """ _attribute_map = { 'policy_type': {'key': 'policyType', 'type': 'str'}, 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, 'slack_factor': {'key': 'slackFactor', 'type': 'float'}, 'slack_amount': {'key': 'slackAmount', 'type': 'float'}, 'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'}, } def __init__( self, *, policy_type: Optional[Union[str, "AetherEarlyTerminationPolicyType"]] = None, evaluation_interval: Optional[int] = None, delay_evaluation: Optional[int] = None, slack_factor: Optional[float] = None, slack_amount: Optional[float] = None, truncation_percentage: Optional[int] = None, **kwargs ): """ :keyword policy_type: Possible values include: "Bandit", "MedianStopping", "TruncationSelection". 
:paramtype policy_type: str or ~flow.models.AetherEarlyTerminationPolicyType :keyword evaluation_interval: :paramtype evaluation_interval: int :keyword delay_evaluation: :paramtype delay_evaluation: int :keyword slack_factor: :paramtype slack_factor: float :keyword slack_amount: :paramtype slack_amount: float :keyword truncation_percentage: :paramtype truncation_percentage: int """ super(AetherSweepEarlyTerminationPolicy, self).__init__(**kwargs) self.policy_type = policy_type self.evaluation_interval = evaluation_interval self.delay_evaluation = delay_evaluation self.slack_factor = slack_factor self.slack_amount = slack_amount self.truncation_percentage = truncation_percentage class AetherSweepSettings(msrest.serialization.Model): """AetherSweepSettings. :ivar limits: :vartype limits: ~flow.models.AetherSweepSettingsLimits :ivar search_space: :vartype search_space: list[dict[str, str]] :ivar sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian". :vartype sampling_algorithm: str or ~flow.models.AetherSamplingAlgorithmType :ivar early_termination: :vartype early_termination: ~flow.models.AetherSweepEarlyTerminationPolicy """ _attribute_map = { 'limits': {'key': 'limits', 'type': 'AetherSweepSettingsLimits'}, 'search_space': {'key': 'searchSpace', 'type': '[{str}]'}, 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'}, 'early_termination': {'key': 'earlyTermination', 'type': 'AetherSweepEarlyTerminationPolicy'}, } def __init__( self, *, limits: Optional["AetherSweepSettingsLimits"] = None, search_space: Optional[List[Dict[str, str]]] = None, sampling_algorithm: Optional[Union[str, "AetherSamplingAlgorithmType"]] = None, early_termination: Optional["AetherSweepEarlyTerminationPolicy"] = None, **kwargs ): """ :keyword limits: :paramtype limits: ~flow.models.AetherSweepSettingsLimits :keyword search_space: :paramtype search_space: list[dict[str, str]] :keyword sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian". 
:paramtype sampling_algorithm: str or ~flow.models.AetherSamplingAlgorithmType :keyword early_termination: :paramtype early_termination: ~flow.models.AetherSweepEarlyTerminationPolicy """ super(AetherSweepSettings, self).__init__(**kwargs) self.limits = limits self.search_space = search_space self.sampling_algorithm = sampling_algorithm self.early_termination = early_termination class AetherSweepSettingsLimits(msrest.serialization.Model): """AetherSweepSettingsLimits. :ivar max_total_trials: :vartype max_total_trials: int :ivar max_concurrent_trials: :vartype max_concurrent_trials: int """ _attribute_map = { 'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'}, 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, } def __init__( self, *, max_total_trials: Optional[int] = None, max_concurrent_trials: Optional[int] = None, **kwargs ): """ :keyword max_total_trials: :paramtype max_total_trials: int :keyword max_concurrent_trials: :paramtype max_concurrent_trials: int """ super(AetherSweepSettingsLimits, self).__init__(**kwargs) self.max_total_trials = max_total_trials self.max_concurrent_trials = max_concurrent_trials class AetherTargetLags(msrest.serialization.Model): """AetherTargetLags. :ivar mode: Possible values include: "Auto", "Custom". :vartype mode: str or ~flow.models.AetherTargetLagsMode :ivar values: :vartype values: list[int] """ _attribute_map = { 'mode': {'key': 'mode', 'type': 'str'}, 'values': {'key': 'values', 'type': '[int]'}, } def __init__( self, *, mode: Optional[Union[str, "AetherTargetLagsMode"]] = None, values: Optional[List[int]] = None, **kwargs ): """ :keyword mode: Possible values include: "Auto", "Custom". :paramtype mode: str or ~flow.models.AetherTargetLagsMode :keyword values: :paramtype values: list[int] """ super(AetherTargetLags, self).__init__(**kwargs) self.mode = mode self.values = values class AetherTargetRollingWindowSize(msrest.serialization.Model): """AetherTargetRollingWindowSize. 
:ivar mode: Possible values include: "Auto", "Custom". :vartype mode: str or ~flow.models.AetherTargetRollingWindowSizeMode :ivar value: :vartype value: int """ _attribute_map = { 'mode': {'key': 'mode', 'type': 'str'}, 'value': {'key': 'value', 'type': 'int'}, } def __init__( self, *, mode: Optional[Union[str, "AetherTargetRollingWindowSizeMode"]] = None, value: Optional[int] = None, **kwargs ): """ :keyword mode: Possible values include: "Auto", "Custom". :paramtype mode: str or ~flow.models.AetherTargetRollingWindowSizeMode :keyword value: :paramtype value: int """ super(AetherTargetRollingWindowSize, self).__init__(**kwargs) self.mode = mode self.value = value class AetherTargetSelectorConfiguration(msrest.serialization.Model): """AetherTargetSelectorConfiguration. :ivar low_priority_vm_tolerant: :vartype low_priority_vm_tolerant: bool :ivar cluster_block_list: :vartype cluster_block_list: list[str] :ivar compute_type: :vartype compute_type: str :ivar instance_type: :vartype instance_type: list[str] :ivar instance_types: :vartype instance_types: list[str] :ivar my_resource_only: :vartype my_resource_only: bool :ivar plan_id: :vartype plan_id: str :ivar plan_region_id: :vartype plan_region_id: str :ivar region: :vartype region: list[str] :ivar regions: :vartype regions: list[str] :ivar vc_block_list: :vartype vc_block_list: list[str] """ _attribute_map = { 'low_priority_vm_tolerant': {'key': 'lowPriorityVMTolerant', 'type': 'bool'}, 'cluster_block_list': {'key': 'clusterBlockList', 'type': '[str]'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'instance_type': {'key': 'instanceType', 'type': '[str]'}, 'instance_types': {'key': 'instanceTypes', 'type': '[str]'}, 'my_resource_only': {'key': 'myResourceOnly', 'type': 'bool'}, 'plan_id': {'key': 'planId', 'type': 'str'}, 'plan_region_id': {'key': 'planRegionId', 'type': 'str'}, 'region': {'key': 'region', 'type': '[str]'}, 'regions': {'key': 'regions', 'type': '[str]'}, 'vc_block_list': {'key': 
'vcBlockList', 'type': '[str]'}, } def __init__( self, *, low_priority_vm_tolerant: Optional[bool] = None, cluster_block_list: Optional[List[str]] = None, compute_type: Optional[str] = None, instance_type: Optional[List[str]] = None, instance_types: Optional[List[str]] = None, my_resource_only: Optional[bool] = None, plan_id: Optional[str] = None, plan_region_id: Optional[str] = None, region: Optional[List[str]] = None, regions: Optional[List[str]] = None, vc_block_list: Optional[List[str]] = None, **kwargs ): """ :keyword low_priority_vm_tolerant: :paramtype low_priority_vm_tolerant: bool :keyword cluster_block_list: :paramtype cluster_block_list: list[str] :keyword compute_type: :paramtype compute_type: str :keyword instance_type: :paramtype instance_type: list[str] :keyword instance_types: :paramtype instance_types: list[str] :keyword my_resource_only: :paramtype my_resource_only: bool :keyword plan_id: :paramtype plan_id: str :keyword plan_region_id: :paramtype plan_region_id: str :keyword region: :paramtype region: list[str] :keyword regions: :paramtype regions: list[str] :keyword vc_block_list: :paramtype vc_block_list: list[str] """ super(AetherTargetSelectorConfiguration, self).__init__(**kwargs) self.low_priority_vm_tolerant = low_priority_vm_tolerant self.cluster_block_list = cluster_block_list self.compute_type = compute_type self.instance_type = instance_type self.instance_types = instance_types self.my_resource_only = my_resource_only self.plan_id = plan_id self.plan_region_id = plan_region_id self.region = region self.regions = regions self.vc_block_list = vc_block_list class AetherTestDataSettings(msrest.serialization.Model): """AetherTestDataSettings. 
:ivar test_data_size: :vartype test_data_size: float """ _attribute_map = { 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, } def __init__( self, *, test_data_size: Optional[float] = None, **kwargs ): """ :keyword test_data_size: :paramtype test_data_size: float """ super(AetherTestDataSettings, self).__init__(**kwargs) self.test_data_size = test_data_size class AetherTorchDistributedConfiguration(msrest.serialization.Model): """AetherTorchDistributedConfiguration. :ivar process_count_per_node: :vartype process_count_per_node: int """ _attribute_map = { 'process_count_per_node': {'key': 'processCountPerNode', 'type': 'int'}, } def __init__( self, *, process_count_per_node: Optional[int] = None, **kwargs ): """ :keyword process_count_per_node: :paramtype process_count_per_node: int """ super(AetherTorchDistributedConfiguration, self).__init__(**kwargs) self.process_count_per_node = process_count_per_node class AetherTrainingOutput(msrest.serialization.Model): """AetherTrainingOutput. :ivar training_output_type: Possible values include: "Metrics", "Model". :vartype training_output_type: str or ~flow.models.AetherTrainingOutputType :ivar iteration: :vartype iteration: int :ivar metric: :vartype metric: str :ivar model_file: :vartype model_file: str """ _attribute_map = { 'training_output_type': {'key': 'trainingOutputType', 'type': 'str'}, 'iteration': {'key': 'iteration', 'type': 'int'}, 'metric': {'key': 'metric', 'type': 'str'}, 'model_file': {'key': 'modelFile', 'type': 'str'}, } def __init__( self, *, training_output_type: Optional[Union[str, "AetherTrainingOutputType"]] = None, iteration: Optional[int] = None, metric: Optional[str] = None, model_file: Optional[str] = None, **kwargs ): """ :keyword training_output_type: Possible values include: "Metrics", "Model". 
:paramtype training_output_type: str or ~flow.models.AetherTrainingOutputType :keyword iteration: :paramtype iteration: int :keyword metric: :paramtype metric: str :keyword model_file: :paramtype model_file: str """ super(AetherTrainingOutput, self).__init__(**kwargs) self.training_output_type = training_output_type self.iteration = iteration self.metric = metric self.model_file = model_file class AetherTrainingSettings(msrest.serialization.Model): """AetherTrainingSettings. :ivar block_list_models: :vartype block_list_models: list[str] :ivar allow_list_models: :vartype allow_list_models: list[str] :ivar enable_dnn_training: :vartype enable_dnn_training: bool :ivar enable_onnx_compatible_models: :vartype enable_onnx_compatible_models: bool :ivar stack_ensemble_settings: :vartype stack_ensemble_settings: ~flow.models.AetherStackEnsembleSettings :ivar enable_stack_ensemble: :vartype enable_stack_ensemble: bool :ivar enable_vote_ensemble: :vartype enable_vote_ensemble: bool :ivar ensemble_model_download_timeout: :vartype ensemble_model_download_timeout: str :ivar enable_model_explainability: :vartype enable_model_explainability: bool :ivar training_mode: Possible values include: "Distributed", "NonDistributed", "Auto". 
:vartype training_mode: str or ~flow.models.AetherTabularTrainingMode """ _attribute_map = { 'block_list_models': {'key': 'blockListModels', 'type': '[str]'}, 'allow_list_models': {'key': 'allowListModels', 'type': '[str]'}, 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'AetherStackEnsembleSettings'}, 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'str'}, 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, 'training_mode': {'key': 'trainingMode', 'type': 'str'}, } def __init__( self, *, block_list_models: Optional[List[str]] = None, allow_list_models: Optional[List[str]] = None, enable_dnn_training: Optional[bool] = None, enable_onnx_compatible_models: Optional[bool] = None, stack_ensemble_settings: Optional["AetherStackEnsembleSettings"] = None, enable_stack_ensemble: Optional[bool] = None, enable_vote_ensemble: Optional[bool] = None, ensemble_model_download_timeout: Optional[str] = None, enable_model_explainability: Optional[bool] = None, training_mode: Optional[Union[str, "AetherTabularTrainingMode"]] = None, **kwargs ): """ :keyword block_list_models: :paramtype block_list_models: list[str] :keyword allow_list_models: :paramtype allow_list_models: list[str] :keyword enable_dnn_training: :paramtype enable_dnn_training: bool :keyword enable_onnx_compatible_models: :paramtype enable_onnx_compatible_models: bool :keyword stack_ensemble_settings: :paramtype stack_ensemble_settings: ~flow.models.AetherStackEnsembleSettings :keyword enable_stack_ensemble: :paramtype enable_stack_ensemble: bool :keyword enable_vote_ensemble: :paramtype enable_vote_ensemble: bool :keyword 
ensemble_model_download_timeout: :paramtype ensemble_model_download_timeout: str :keyword enable_model_explainability: :paramtype enable_model_explainability: bool :keyword training_mode: Possible values include: "Distributed", "NonDistributed", "Auto". :paramtype training_mode: str or ~flow.models.AetherTabularTrainingMode """ super(AetherTrainingSettings, self).__init__(**kwargs) self.block_list_models = block_list_models self.allow_list_models = allow_list_models self.enable_dnn_training = enable_dnn_training self.enable_onnx_compatible_models = enable_onnx_compatible_models self.stack_ensemble_settings = stack_ensemble_settings self.enable_stack_ensemble = enable_stack_ensemble self.enable_vote_ensemble = enable_vote_ensemble self.ensemble_model_download_timeout = ensemble_model_download_timeout self.enable_model_explainability = enable_model_explainability self.training_mode = training_mode class AetherUIAzureOpenAIDeploymentNameSelector(msrest.serialization.Model): """AetherUIAzureOpenAIDeploymentNameSelector. :ivar capabilities: :vartype capabilities: ~flow.models.AetherUIAzureOpenAIModelCapabilities """ _attribute_map = { 'capabilities': {'key': 'Capabilities', 'type': 'AetherUIAzureOpenAIModelCapabilities'}, } def __init__( self, *, capabilities: Optional["AetherUIAzureOpenAIModelCapabilities"] = None, **kwargs ): """ :keyword capabilities: :paramtype capabilities: ~flow.models.AetherUIAzureOpenAIModelCapabilities """ super(AetherUIAzureOpenAIDeploymentNameSelector, self).__init__(**kwargs) self.capabilities = capabilities class AetherUIAzureOpenAIModelCapabilities(msrest.serialization.Model): """AetherUIAzureOpenAIModelCapabilities. 
:ivar completion: :vartype completion: bool :ivar chat_completion: :vartype chat_completion: bool :ivar embeddings: :vartype embeddings: bool """ _attribute_map = { 'completion': {'key': 'Completion', 'type': 'bool'}, 'chat_completion': {'key': 'ChatCompletion', 'type': 'bool'}, 'embeddings': {'key': 'Embeddings', 'type': 'bool'}, } def __init__( self, *, completion: Optional[bool] = None, chat_completion: Optional[bool] = None, embeddings: Optional[bool] = None, **kwargs ): """ :keyword completion: :paramtype completion: bool :keyword chat_completion: :paramtype chat_completion: bool :keyword embeddings: :paramtype embeddings: bool """ super(AetherUIAzureOpenAIModelCapabilities, self).__init__(**kwargs) self.completion = completion self.chat_completion = chat_completion self.embeddings = embeddings class AetherUIColumnPicker(msrest.serialization.Model): """AetherUIColumnPicker. :ivar column_picker_for: :vartype column_picker_for: str :ivar column_selection_categories: :vartype column_selection_categories: list[str] :ivar single_column_selection: :vartype single_column_selection: bool """ _attribute_map = { 'column_picker_for': {'key': 'columnPickerFor', 'type': 'str'}, 'column_selection_categories': {'key': 'columnSelectionCategories', 'type': '[str]'}, 'single_column_selection': {'key': 'singleColumnSelection', 'type': 'bool'}, } def __init__( self, *, column_picker_for: Optional[str] = None, column_selection_categories: Optional[List[str]] = None, single_column_selection: Optional[bool] = None, **kwargs ): """ :keyword column_picker_for: :paramtype column_picker_for: str :keyword column_selection_categories: :paramtype column_selection_categories: list[str] :keyword single_column_selection: :paramtype single_column_selection: bool """ super(AetherUIColumnPicker, self).__init__(**kwargs) self.column_picker_for = column_picker_for self.column_selection_categories = column_selection_categories self.single_column_selection = single_column_selection class 
AetherUIJsonEditor(msrest.serialization.Model): """AetherUIJsonEditor. :ivar json_schema: :vartype json_schema: str """ _attribute_map = { 'json_schema': {'key': 'jsonSchema', 'type': 'str'}, } def __init__( self, *, json_schema: Optional[str] = None, **kwargs ): """ :keyword json_schema: :paramtype json_schema: str """ super(AetherUIJsonEditor, self).__init__(**kwargs) self.json_schema = json_schema class AetherUIParameterHint(msrest.serialization.Model): """AetherUIParameterHint. :ivar ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection". :vartype ui_widget_type: str or ~flow.models.AetherUIWidgetTypeEnum :ivar column_picker: :vartype column_picker: ~flow.models.AetherUIColumnPicker :ivar ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql". 
:vartype ui_script_language: str or ~flow.models.AetherUIScriptLanguageEnum :ivar json_editor: :vartype json_editor: ~flow.models.AetherUIJsonEditor :ivar prompt_flow_connection_selector: :vartype prompt_flow_connection_selector: ~flow.models.AetherUIPromptFlowConnectionSelector :ivar azure_open_ai_deployment_name_selector: :vartype azure_open_ai_deployment_name_selector: ~flow.models.AetherUIAzureOpenAIDeploymentNameSelector :ivar ux_ignore: :vartype ux_ignore: bool :ivar anonymous: :vartype anonymous: bool """ _attribute_map = { 'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'}, 'column_picker': {'key': 'columnPicker', 'type': 'AetherUIColumnPicker'}, 'ui_script_language': {'key': 'uiScriptLanguage', 'type': 'str'}, 'json_editor': {'key': 'jsonEditor', 'type': 'AetherUIJsonEditor'}, 'prompt_flow_connection_selector': {'key': 'PromptFlowConnectionSelector', 'type': 'AetherUIPromptFlowConnectionSelector'}, 'azure_open_ai_deployment_name_selector': {'key': 'AzureOpenAIDeploymentNameSelector', 'type': 'AetherUIAzureOpenAIDeploymentNameSelector'}, 'ux_ignore': {'key': 'UxIgnore', 'type': 'bool'}, 'anonymous': {'key': 'Anonymous', 'type': 'bool'}, } def __init__( self, *, ui_widget_type: Optional[Union[str, "AetherUIWidgetTypeEnum"]] = None, column_picker: Optional["AetherUIColumnPicker"] = None, ui_script_language: Optional[Union[str, "AetherUIScriptLanguageEnum"]] = None, json_editor: Optional["AetherUIJsonEditor"] = None, prompt_flow_connection_selector: Optional["AetherUIPromptFlowConnectionSelector"] = None, azure_open_ai_deployment_name_selector: Optional["AetherUIAzureOpenAIDeploymentNameSelector"] = None, ux_ignore: Optional[bool] = None, anonymous: Optional[bool] = None, **kwargs ): """ :keyword ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection", 
"ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection". :paramtype ui_widget_type: str or ~flow.models.AetherUIWidgetTypeEnum :keyword column_picker: :paramtype column_picker: ~flow.models.AetherUIColumnPicker :keyword ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql". :paramtype ui_script_language: str or ~flow.models.AetherUIScriptLanguageEnum :keyword json_editor: :paramtype json_editor: ~flow.models.AetherUIJsonEditor :keyword prompt_flow_connection_selector: :paramtype prompt_flow_connection_selector: ~flow.models.AetherUIPromptFlowConnectionSelector :keyword azure_open_ai_deployment_name_selector: :paramtype azure_open_ai_deployment_name_selector: ~flow.models.AetherUIAzureOpenAIDeploymentNameSelector :keyword ux_ignore: :paramtype ux_ignore: bool :keyword anonymous: :paramtype anonymous: bool """ super(AetherUIParameterHint, self).__init__(**kwargs) self.ui_widget_type = ui_widget_type self.column_picker = column_picker self.ui_script_language = ui_script_language self.json_editor = json_editor self.prompt_flow_connection_selector = prompt_flow_connection_selector self.azure_open_ai_deployment_name_selector = azure_open_ai_deployment_name_selector self.ux_ignore = ux_ignore self.anonymous = anonymous class AetherUIPromptFlowConnectionSelector(msrest.serialization.Model): """AetherUIPromptFlowConnectionSelector. :ivar prompt_flow_connection_type: :vartype prompt_flow_connection_type: str """ _attribute_map = { 'prompt_flow_connection_type': {'key': 'PromptFlowConnectionType', 'type': 'str'}, } def __init__( self, *, prompt_flow_connection_type: Optional[str] = None, **kwargs ): """ :keyword prompt_flow_connection_type: :paramtype prompt_flow_connection_type: str """ super(AetherUIPromptFlowConnectionSelector, self).__init__(**kwargs) self.prompt_flow_connection_type = prompt_flow_connection_type class AetherValidationDataSettings(msrest.serialization.Model): """AetherValidationDataSettings. 
:ivar n_cross_validations:
    :vartype n_cross_validations: ~flow.models.AetherNCrossValidations
    :ivar validation_data_size:
    :vartype validation_data_size: float
    :ivar cv_split_column_names:
    :vartype cv_split_column_names: list[str]
    :ivar validation_type:
    :vartype validation_type: str
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'n_cross_validations': {'key': 'nCrossValidations', 'type': 'AetherNCrossValidations'},
        'validation_data_size': {'key': 'validationDataSize', 'type': 'float'},
        'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'},
        'validation_type': {'key': 'validationType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        n_cross_validations: Optional["AetherNCrossValidations"] = None,
        validation_data_size: Optional[float] = None,
        cv_split_column_names: Optional[List[str]] = None,
        validation_type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword n_cross_validations:
        :paramtype n_cross_validations: ~flow.models.AetherNCrossValidations
        :keyword validation_data_size:
        :paramtype validation_data_size: float
        :keyword cv_split_column_names:
        :paramtype cv_split_column_names: list[str]
        :keyword validation_type:
        :paramtype validation_type: str
        """
        super(AetherValidationDataSettings, self).__init__(**kwargs)
        self.n_cross_validations = n_cross_validations
        self.validation_data_size = validation_data_size
        self.cv_split_column_names = cv_split_column_names
        self.validation_type = validation_type


# AutoRest-generated DTO pairing a VSO build reference with its download URL.
class AetherVsoBuildArtifactInfo(msrest.serialization.Model):
    """AetherVsoBuildArtifactInfo.
:ivar build_info:
    :vartype build_info: ~flow.models.AetherVsoBuildInfo
    :ivar download_url:
    :vartype download_url: str
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'build_info': {'key': 'buildInfo', 'type': 'AetherVsoBuildInfo'},
        'download_url': {'key': 'downloadUrl', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        build_info: Optional["AetherVsoBuildInfo"] = None,
        download_url: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword build_info:
        :paramtype build_info: ~flow.models.AetherVsoBuildInfo
        :keyword download_url:
        :paramtype download_url: str
        """
        super(AetherVsoBuildArtifactInfo, self).__init__(**kwargs)
        self.build_info = build_info
        self.download_url = download_url


# AutoRest-generated DTO identifying a VSO/Azure DevOps build definition.
class AetherVsoBuildDefinitionInfo(msrest.serialization.Model):
    """AetherVsoBuildDefinitionInfo.

    :ivar account_name:
    :vartype account_name: str
    :ivar project_id:
    :vartype project_id: str
    :ivar build_definition_id:
    :vartype build_definition_id: int
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'account_name': {'key': 'accountName', 'type': 'str'},
        'project_id': {'key': 'projectId', 'type': 'str'},
        'build_definition_id': {'key': 'buildDefinitionId', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        account_name: Optional[str] = None,
        project_id: Optional[str] = None,
        build_definition_id: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword account_name:
        :paramtype account_name: str
        :keyword project_id:
        :paramtype project_id: str
        :keyword build_definition_id:
        :paramtype build_definition_id: int
        """
        super(AetherVsoBuildDefinitionInfo, self).__init__(**kwargs)
        self.account_name = account_name
        self.project_id = project_id
        self.build_definition_id = build_definition_id


# AutoRest-generated DTO pointing at a concrete build of a VSO definition.
class AetherVsoBuildInfo(msrest.serialization.Model):
    """AetherVsoBuildInfo.
:ivar definition_info:
    :vartype definition_info: ~flow.models.AetherVsoBuildDefinitionInfo
    :ivar build_id:
    :vartype build_id: int
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'definition_info': {'key': 'definitionInfo', 'type': 'AetherVsoBuildDefinitionInfo'},
        'build_id': {'key': 'buildId', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        definition_info: Optional["AetherVsoBuildDefinitionInfo"] = None,
        build_id: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword definition_info:
        :paramtype definition_info: ~flow.models.AetherVsoBuildDefinitionInfo
        :keyword build_id:
        :paramtype build_id: int
        """
        super(AetherVsoBuildInfo, self).__init__(**kwargs)
        self.definition_info = definition_info
        self.build_id = build_id


# AutoRest-generated DTO describing compute selection for an AEVA job;
# body continues below.
class AEVAComputeConfiguration(msrest.serialization.Model):
    """AEVAComputeConfiguration.

    :ivar target:
    :vartype target: str
    :ivar instance_count:
    :vartype instance_count: int
    :ivar is_local:
    :vartype is_local: bool
    :ivar location:
    :vartype location: str
    :ivar is_clusterless:
    :vartype is_clusterless: bool
    :ivar instance_type:
    :vartype instance_type: str
    :ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
    :ivar is_preemptable:
    :vartype is_preemptable: bool
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'target': {'key': 'target', 'type': 'str'},
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
        'is_local': {'key': 'isLocal', 'type': 'bool'},
        'location': {'key': 'location', 'type': 'str'},
        'is_clusterless': {'key': 'isClusterless', 'type': 'bool'},
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
        'is_preemptable': {'key': 'isPreemptable', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        target: Optional[str] = None,
        instance_count: Optional[int] = None,
        is_local: Optional[bool] = None,
        location: Optional[str] = None,
        is_clusterless: Optional[bool] = None,
        instance_type: Optional[str] = None,
        properties: Optional[Dict[str, Any]] = None,
        is_preemptable: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword target:
        :paramtype target: str
        :keyword instance_count:
        :paramtype instance_count: int
        :keyword is_local:
        :paramtype is_local: bool
        :keyword location:
        :paramtype location: str
        :keyword is_clusterless:
        :paramtype is_clusterless: bool
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword properties: Dictionary of :code:`<any>`.
        :paramtype properties: dict[str, any]
        :keyword is_preemptable:
        :paramtype is_preemptable: bool
        """
        super(AEVAComputeConfiguration, self).__init__(**kwargs)
        self.target = target
        self.instance_count = instance_count
        self.is_local = is_local
        self.location = location
        self.is_clusterless = is_clusterless
        self.instance_type = instance_type
        self.properties = properties
        self.is_preemptable = is_preemptable


# AutoRest-generated DTO describing per-job resource requests;
# body continues below.
class AEVAResourceConfiguration(msrest.serialization.Model):
    """AEVAResourceConfiguration.

    :ivar instance_count:
    :vartype instance_count: int
    :ivar instance_type:
    :vartype instance_type: str
    :ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
    :ivar locations:
    :vartype locations: list[str]
    :ivar instance_priority:
    :vartype instance_priority: str
    :ivar quota_enforcement_resource_id:
    :vartype quota_enforcement_resource_id: str
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
        'locations': {'key': 'locations', 'type': '[str]'},
        'instance_priority': {'key': 'instancePriority', 'type': 'str'},
        'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        instance_count: Optional[int] = None,
        instance_type: Optional[str] = None,
        properties: Optional[Dict[str, Any]] = None,
        locations: Optional[List[str]] = None,
        instance_priority: Optional[str] = None,
        quota_enforcement_resource_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword instance_count:
        :paramtype instance_count: int
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword properties: Dictionary of :code:`<any>`.
        :paramtype properties: dict[str, any]
        :keyword locations:
        :paramtype locations: list[str]
        :keyword instance_priority:
        :paramtype instance_priority: str
        :keyword quota_enforcement_resource_id:
        :paramtype quota_enforcement_resource_id: str
        """
        super(AEVAResourceConfiguration, self).__init__(**kwargs)
        self.instance_count = instance_count
        self.instance_type = instance_type
        self.properties = properties
        self.locations = locations
        self.instance_priority = instance_priority
        self.quota_enforcement_resource_id = quota_enforcement_resource_id


# AutoRest-generated DTO for AI SuperComputer (singularity-style) job settings;
# body continues below.
class AISuperComputerConfiguration(msrest.serialization.Model):
    """AISuperComputerConfiguration.
:ivar instance_type:
    :vartype instance_type: str
    :ivar instance_types:
    :vartype instance_types: list[str]
    :ivar image_version:
    :vartype image_version: str
    :ivar location:
    :vartype location: str
    :ivar locations:
    :vartype locations: list[str]
    :ivar ai_super_computer_storage_data: Dictionary of
     :code:`<AISuperComputerStorageReferenceConfiguration>`.
    :vartype ai_super_computer_storage_data: dict[str,
     ~flow.models.AISuperComputerStorageReferenceConfiguration]
    :ivar interactive:
    :vartype interactive: bool
    :ivar scale_policy:
    :vartype scale_policy: ~flow.models.AISuperComputerScalePolicy
    :ivar virtual_cluster_arm_id:
    :vartype virtual_cluster_arm_id: str
    :ivar tensorboard_log_directory:
    :vartype tensorboard_log_directory: str
    :ivar ssh_public_key:
    :vartype ssh_public_key: str
    :ivar ssh_public_keys:
    :vartype ssh_public_keys: list[str]
    :ivar enable_azml_int:
    :vartype enable_azml_int: bool
    :ivar priority:
    :vartype priority: str
    :ivar sla_tier:
    :vartype sla_tier: str
    :ivar suspend_on_idle_time_hours:
    :vartype suspend_on_idle_time_hours: long
    :ivar user_alias:
    :vartype user_alias: str
    :ivar quota_enforcement_resource_id:
    :vartype quota_enforcement_resource_id: str
    :ivar model_compute_specification_id:
    :vartype model_compute_specification_id: str
    :ivar group_policy_name:
    :vartype group_policy_name: str
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'instance_types': {'key': 'instanceTypes', 'type': '[str]'},
        'image_version': {'key': 'imageVersion', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'locations': {'key': 'locations', 'type': '[str]'},
        'ai_super_computer_storage_data': {'key': 'aiSuperComputerStorageData', 'type': '{AISuperComputerStorageReferenceConfiguration}'},
        'interactive': {'key': 'interactive', 'type': 'bool'},
        'scale_policy': {'key': 'scalePolicy', 'type': 'AISuperComputerScalePolicy'},
        'virtual_cluster_arm_id': {'key': 'virtualClusterArmId', 'type': 'str'},
        'tensorboard_log_directory': {'key': 'tensorboardLogDirectory', 'type': 'str'},
        'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
        'ssh_public_keys': {'key': 'sshPublicKeys', 'type': '[str]'},
        'enable_azml_int': {'key': 'enableAzmlInt', 'type': 'bool'},
        'priority': {'key': 'priority', 'type': 'str'},
        'sla_tier': {'key': 'slaTier', 'type': 'str'},
        'suspend_on_idle_time_hours': {'key': 'suspendOnIdleTimeHours', 'type': 'long'},
        'user_alias': {'key': 'userAlias', 'type': 'str'},
        'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
        'model_compute_specification_id': {'key': 'modelComputeSpecificationId', 'type': 'str'},
        'group_policy_name': {'key': 'groupPolicyName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        instance_type: Optional[str] = None,
        instance_types: Optional[List[str]] = None,
        image_version: Optional[str] = None,
        location: Optional[str] = None,
        locations: Optional[List[str]] = None,
        ai_super_computer_storage_data: Optional[Dict[str, "AISuperComputerStorageReferenceConfiguration"]] = None,
        interactive: Optional[bool] = None,
        scale_policy: Optional["AISuperComputerScalePolicy"] = None,
        virtual_cluster_arm_id: Optional[str] = None,
        tensorboard_log_directory: Optional[str] = None,
        ssh_public_key: Optional[str] = None,
        ssh_public_keys: Optional[List[str]] = None,
        enable_azml_int: Optional[bool] = None,
        priority: Optional[str] = None,
        sla_tier: Optional[str] = None,
        suspend_on_idle_time_hours: Optional[int] = None,
        user_alias: Optional[str] = None,
        quota_enforcement_resource_id: Optional[str] = None,
        model_compute_specification_id: Optional[str] = None,
        group_policy_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword instance_types:
        :paramtype instance_types: list[str]
        :keyword image_version:
        :paramtype image_version: str
        :keyword location:
        :paramtype location: str
        :keyword locations:
        :paramtype locations: list[str]
        :keyword ai_super_computer_storage_data: Dictionary of
         :code:`<AISuperComputerStorageReferenceConfiguration>`.
        :paramtype ai_super_computer_storage_data: dict[str,
         ~flow.models.AISuperComputerStorageReferenceConfiguration]
        :keyword interactive:
        :paramtype interactive: bool
        :keyword scale_policy:
        :paramtype scale_policy: ~flow.models.AISuperComputerScalePolicy
        :keyword virtual_cluster_arm_id:
        :paramtype virtual_cluster_arm_id: str
        :keyword tensorboard_log_directory:
        :paramtype tensorboard_log_directory: str
        :keyword ssh_public_key:
        :paramtype ssh_public_key: str
        :keyword ssh_public_keys:
        :paramtype ssh_public_keys: list[str]
        :keyword enable_azml_int:
        :paramtype enable_azml_int: bool
        :keyword priority:
        :paramtype priority: str
        :keyword sla_tier:
        :paramtype sla_tier: str
        :keyword suspend_on_idle_time_hours:
        :paramtype suspend_on_idle_time_hours: long
        :keyword user_alias:
        :paramtype user_alias: str
        :keyword quota_enforcement_resource_id:
        :paramtype quota_enforcement_resource_id: str
        :keyword model_compute_specification_id:
        :paramtype model_compute_specification_id: str
        :keyword group_policy_name:
        :paramtype group_policy_name: str
        """
        super(AISuperComputerConfiguration, self).__init__(**kwargs)
        self.instance_type = instance_type
        self.instance_types = instance_types
        self.image_version = image_version
        self.location = location
        self.locations = locations
        self.ai_super_computer_storage_data = ai_super_computer_storage_data
        self.interactive = interactive
        self.scale_policy = scale_policy
        self.virtual_cluster_arm_id = virtual_cluster_arm_id
        self.tensorboard_log_directory = tensorboard_log_directory
        self.ssh_public_key = ssh_public_key
        self.ssh_public_keys = ssh_public_keys
        self.enable_azml_int = enable_azml_int
        self.priority = priority
        self.sla_tier = sla_tier
        self.suspend_on_idle_time_hours = suspend_on_idle_time_hours
        self.user_alias = user_alias
        self.quota_enforcement_resource_id = quota_enforcement_resource_id
        self.model_compute_specification_id = model_compute_specification_id
        self.group_policy_name = group_policy_name


# AutoRest-generated DTO holding auto-scale bounds for an AI SuperComputer job.
class AISuperComputerScalePolicy(msrest.serialization.Model):
    """AISuperComputerScalePolicy.

    :ivar auto_scale_instance_type_count_set:
    :vartype auto_scale_instance_type_count_set: list[int]
    :ivar auto_scale_interval_in_sec:
    :vartype auto_scale_interval_in_sec: int
    :ivar max_instance_type_count:
    :vartype max_instance_type_count: int
    :ivar min_instance_type_count:
    :vartype min_instance_type_count: int
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'auto_scale_instance_type_count_set': {'key': 'autoScaleInstanceTypeCountSet', 'type': '[int]'},
        'auto_scale_interval_in_sec': {'key': 'autoScaleIntervalInSec', 'type': 'int'},
        'max_instance_type_count': {'key': 'maxInstanceTypeCount', 'type': 'int'},
        'min_instance_type_count': {'key': 'minInstanceTypeCount', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        auto_scale_instance_type_count_set: Optional[List[int]] = None,
        auto_scale_interval_in_sec: Optional[int] = None,
        max_instance_type_count: Optional[int] = None,
        min_instance_type_count: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword auto_scale_instance_type_count_set:
        :paramtype auto_scale_instance_type_count_set: list[int]
        :keyword auto_scale_interval_in_sec:
        :paramtype auto_scale_interval_in_sec: int
        :keyword max_instance_type_count:
        :paramtype max_instance_type_count: int
        :keyword min_instance_type_count:
        :paramtype min_instance_type_count: int
        """
        super(AISuperComputerScalePolicy, self).__init__(**kwargs)
        self.auto_scale_instance_type_count_set = auto_scale_instance_type_count_set
        self.auto_scale_interval_in_sec = auto_scale_interval_in_sec
        self.max_instance_type_count = max_instance_type_count
        self.min_instance_type_count = min_instance_type_count


# AutoRest-generated DTO naming a storage container/path; body continues below.
class AISuperComputerStorageReferenceConfiguration(msrest.serialization.Model):
    """AISuperComputerStorageReferenceConfiguration.
:ivar container_name:
    :vartype container_name: str
    :ivar relative_path:
    :vartype relative_path: str
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'container_name': {'key': 'containerName', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        container_name: Optional[str] = None,
        relative_path: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword container_name:
        :paramtype container_name: str
        :keyword relative_path:
        :paramtype relative_path: str
        """
        super(AISuperComputerStorageReferenceConfiguration, self).__init__(**kwargs)
        self.container_name = container_name
        self.relative_path = relative_path


# AutoRest-generated DTO with AKS deployment tuning knobs; __init__ docstring
# and assignments continue below.
class AKSAdvanceSettings(msrest.serialization.Model):
    """AKSAdvanceSettings.

    :ivar auto_scaler:
    :vartype auto_scaler: ~flow.models.AutoScaler
    :ivar container_resource_requirements:
    :vartype container_resource_requirements: ~flow.models.ContainerResourceRequirements
    :ivar app_insights_enabled:
    :vartype app_insights_enabled: bool
    :ivar scoring_timeout_ms:
    :vartype scoring_timeout_ms: int
    :ivar num_replicas:
    :vartype num_replicas: int
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'auto_scaler': {'key': 'autoScaler', 'type': 'AutoScaler'},
        'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
        'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
        'num_replicas': {'key': 'numReplicas', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        auto_scaler: Optional["AutoScaler"] = None,
        container_resource_requirements: Optional["ContainerResourceRequirements"] = None,
        app_insights_enabled: Optional[bool] = None,
        scoring_timeout_ms: Optional[int] = None,
        num_replicas: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword auto_scaler:
        :paramtype auto_scaler: ~flow.models.AutoScaler
        :keyword container_resource_requirements:
        :paramtype container_resource_requirements: ~flow.models.ContainerResourceRequirements
        :keyword app_insights_enabled:
        :paramtype
app_insights_enabled: bool
        :keyword scoring_timeout_ms:
        :paramtype scoring_timeout_ms: int
        :keyword num_replicas:
        :paramtype num_replicas: int
        """
        super(AKSAdvanceSettings, self).__init__(**kwargs)
        # Tail of AKSAdvanceSettings.__init__: copy keyword arguments onto the instance.
        self.auto_scaler = auto_scaler
        self.container_resource_requirements = container_resource_requirements
        self.app_insights_enabled = app_insights_enabled
        self.scoring_timeout_ms = scoring_timeout_ms
        self.num_replicas = num_replicas


# AutoRest-generated DTO reporting replica counts (and an optional error) for
# an AKS deployment.
class AKSReplicaStatus(msrest.serialization.Model):
    """AKSReplicaStatus.

    :ivar desired_replicas:
    :vartype desired_replicas: int
    :ivar updated_replicas:
    :vartype updated_replicas: int
    :ivar available_replicas:
    :vartype available_replicas: int
    :ivar error:
    :vartype error: ~flow.models.ModelManagementErrorResponse
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'desired_replicas': {'key': 'desiredReplicas', 'type': 'int'},
        'updated_replicas': {'key': 'updatedReplicas', 'type': 'int'},
        'available_replicas': {'key': 'availableReplicas', 'type': 'int'},
        'error': {'key': 'error', 'type': 'ModelManagementErrorResponse'},
    }

    def __init__(
        self,
        *,
        desired_replicas: Optional[int] = None,
        updated_replicas: Optional[int] = None,
        available_replicas: Optional[int] = None,
        error: Optional["ModelManagementErrorResponse"] = None,
        **kwargs
    ):
        """
        :keyword desired_replicas:
        :paramtype desired_replicas: int
        :keyword updated_replicas:
        :paramtype updated_replicas: int
        :keyword available_replicas:
        :paramtype available_replicas: int
        :keyword error:
        :paramtype error: ~flow.models.ModelManagementErrorResponse
        """
        super(AKSReplicaStatus, self).__init__(**kwargs)
        self.desired_replicas = desired_replicas
        self.updated_replicas = updated_replicas
        self.available_replicas = available_replicas
        self.error = error


# AutoRest-generated DTO for AmlCompute cluster settings; body continues below.
class AMLComputeConfiguration(msrest.serialization.Model):
    """AMLComputeConfiguration.

    :ivar name:
    :vartype name: str
    :ivar vm_size:
    :vartype vm_size: str
    :ivar vm_priority: Possible values include: "Dedicated", "Lowpriority".
:vartype vm_priority: str or ~flow.models.VmPriority
    :ivar retain_cluster:
    :vartype retain_cluster: bool
    :ivar cluster_max_node_count:
    :vartype cluster_max_node_count: int
    :ivar os_type:
    :vartype os_type: str
    :ivar virtual_machine_image:
    :vartype virtual_machine_image: str
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'vm_priority': {'key': 'vmPriority', 'type': 'str'},
        'retain_cluster': {'key': 'retainCluster', 'type': 'bool'},
        'cluster_max_node_count': {'key': 'clusterMaxNodeCount', 'type': 'int'},
        'os_type': {'key': 'osType', 'type': 'str'},
        'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        vm_size: Optional[str] = None,
        vm_priority: Optional[Union[str, "VmPriority"]] = None,
        retain_cluster: Optional[bool] = None,
        cluster_max_node_count: Optional[int] = None,
        os_type: Optional[str] = None,
        virtual_machine_image: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword vm_priority: Possible values include: "Dedicated", "Lowpriority".
        :paramtype vm_priority: str or ~flow.models.VmPriority
        :keyword retain_cluster:
        :paramtype retain_cluster: bool
        :keyword cluster_max_node_count:
        :paramtype cluster_max_node_count: int
        :keyword os_type:
        :paramtype os_type: str
        :keyword virtual_machine_image:
        :paramtype virtual_machine_image: str
        """
        super(AMLComputeConfiguration, self).__init__(**kwargs)
        self.name = name
        self.vm_size = vm_size
        self.vm_priority = vm_priority
        self.retain_cluster = retain_cluster
        self.cluster_max_node_count = cluster_max_node_count
        self.os_type = os_type
        self.virtual_machine_image = virtual_machine_image


# AutoRest-generated DTO referencing an AML dataset; body continues below.
class AmlDataset(msrest.serialization.Model):
    """AmlDataset.
:ivar registered_data_set_reference:
    :vartype registered_data_set_reference: ~flow.models.RegisteredDataSetReference
    :ivar saved_data_set_reference:
    :vartype saved_data_set_reference: ~flow.models.SavedDataSetReference
    :ivar additional_transformations:
    :vartype additional_transformations: str
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'registered_data_set_reference': {'key': 'registeredDataSetReference', 'type': 'RegisteredDataSetReference'},
        'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'SavedDataSetReference'},
        'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        registered_data_set_reference: Optional["RegisteredDataSetReference"] = None,
        saved_data_set_reference: Optional["SavedDataSetReference"] = None,
        additional_transformations: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword registered_data_set_reference:
        :paramtype registered_data_set_reference: ~flow.models.RegisteredDataSetReference
        :keyword saved_data_set_reference:
        :paramtype saved_data_set_reference: ~flow.models.SavedDataSetReference
        :keyword additional_transformations:
        :paramtype additional_transformations: str
        """
        super(AmlDataset, self).__init__(**kwargs)
        self.registered_data_set_reference = registered_data_set_reference
        self.saved_data_set_reference = saved_data_set_reference
        self.additional_transformations = additional_transformations


# AutoRest-generated DTO grouping AML-on-Kubernetes settings; body continues below.
class AmlK8SConfiguration(msrest.serialization.Model):
    """AmlK8SConfiguration.
:ivar resource_configuration:
    :vartype resource_configuration: ~flow.models.ResourceConfiguration
    :ivar priority_configuration:
    :vartype priority_configuration: ~flow.models.AmlK8SPriorityConfiguration
    :ivar interactive_configuration:
    :vartype interactive_configuration: ~flow.models.InteractiveConfiguration
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'resource_configuration': {'key': 'resourceConfiguration', 'type': 'ResourceConfiguration'},
        'priority_configuration': {'key': 'priorityConfiguration', 'type': 'AmlK8SPriorityConfiguration'},
        'interactive_configuration': {'key': 'interactiveConfiguration', 'type': 'InteractiveConfiguration'},
    }

    def __init__(
        self,
        *,
        resource_configuration: Optional["ResourceConfiguration"] = None,
        priority_configuration: Optional["AmlK8SPriorityConfiguration"] = None,
        interactive_configuration: Optional["InteractiveConfiguration"] = None,
        **kwargs
    ):
        """
        :keyword resource_configuration:
        :paramtype resource_configuration: ~flow.models.ResourceConfiguration
        :keyword priority_configuration:
        :paramtype priority_configuration: ~flow.models.AmlK8SPriorityConfiguration
        :keyword interactive_configuration:
        :paramtype interactive_configuration: ~flow.models.InteractiveConfiguration
        """
        super(AmlK8SConfiguration, self).__init__(**kwargs)
        self.resource_configuration = resource_configuration
        self.priority_configuration = priority_configuration
        self.interactive_configuration = interactive_configuration


# AutoRest-generated DTO with job priority / preemption settings for AML on
# Kubernetes; body continues below.
class AmlK8SPriorityConfiguration(msrest.serialization.Model):
    """AmlK8SPriorityConfiguration.
:ivar job_priority:
    :vartype job_priority: int
    :ivar is_preemptible:
    :vartype is_preemptible: bool
    :ivar node_count_set:
    :vartype node_count_set: list[int]
    :ivar scale_interval:
    :vartype scale_interval: int
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'job_priority': {'key': 'jobPriority', 'type': 'int'},
        'is_preemptible': {'key': 'isPreemptible', 'type': 'bool'},
        'node_count_set': {'key': 'nodeCountSet', 'type': '[int]'},
        'scale_interval': {'key': 'scaleInterval', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        job_priority: Optional[int] = None,
        is_preemptible: Optional[bool] = None,
        node_count_set: Optional[List[int]] = None,
        scale_interval: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword job_priority:
        :paramtype job_priority: int
        :keyword is_preemptible:
        :paramtype is_preemptible: bool
        :keyword node_count_set:
        :paramtype node_count_set: list[int]
        :keyword scale_interval:
        :paramtype scale_interval: int
        """
        super(AmlK8SPriorityConfiguration, self).__init__(**kwargs)
        self.job_priority = job_priority
        self.is_preemptible = is_preemptible
        self.node_count_set = node_count_set
        self.scale_interval = scale_interval


# AutoRest-generated DTO mirroring Spark job submission settings for AML;
# body continues below.
class AmlSparkCloudSetting(msrest.serialization.Model):
    """AmlSparkCloudSetting.

    :ivar entry:
    :vartype entry: ~flow.models.EntrySetting
    :ivar files:
    :vartype files: list[str]
    :ivar archives:
    :vartype archives: list[str]
    :ivar jars:
    :vartype jars: list[str]
    :ivar py_files:
    :vartype py_files: list[str]
    :ivar driver_memory:
    :vartype driver_memory: str
    :ivar driver_cores:
    :vartype driver_cores: int
    :ivar executor_memory:
    :vartype executor_memory: str
    :ivar executor_cores:
    :vartype executor_cores: int
    :ivar number_executors:
    :vartype number_executors: int
    :ivar environment_asset_id:
    :vartype environment_asset_id: str
    :ivar environment_variables: Dictionary of :code:`<string>`.
    :vartype environment_variables: dict[str, str]
    :ivar inline_environment_definition_string:
    :vartype inline_environment_definition_string: str
    :ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
    :ivar compute:
    :vartype compute: str
    :ivar resources:
    :vartype resources: ~flow.models.ResourcesSetting
    :ivar identity:
    :vartype identity: ~flow.models.IdentitySetting
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'entry': {'key': 'entry', 'type': 'EntrySetting'},
        'files': {'key': 'files', 'type': '[str]'},
        'archives': {'key': 'archives', 'type': '[str]'},
        'jars': {'key': 'jars', 'type': '[str]'},
        'py_files': {'key': 'pyFiles', 'type': '[str]'},
        'driver_memory': {'key': 'driverMemory', 'type': 'str'},
        'driver_cores': {'key': 'driverCores', 'type': 'int'},
        'executor_memory': {'key': 'executorMemory', 'type': 'str'},
        'executor_cores': {'key': 'executorCores', 'type': 'int'},
        'number_executors': {'key': 'numberExecutors', 'type': 'int'},
        'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'inline_environment_definition_string': {'key': 'inlineEnvironmentDefinitionString', 'type': 'str'},
        'conf': {'key': 'conf', 'type': '{str}'},
        'compute': {'key': 'compute', 'type': 'str'},
        'resources': {'key': 'resources', 'type': 'ResourcesSetting'},
        'identity': {'key': 'identity', 'type': 'IdentitySetting'},
    }

    def __init__(
        self,
        *,
        entry: Optional["EntrySetting"] = None,
        files: Optional[List[str]] = None,
        archives: Optional[List[str]] = None,
        jars: Optional[List[str]] = None,
        py_files: Optional[List[str]] = None,
        driver_memory: Optional[str] = None,
        driver_cores: Optional[int] = None,
        executor_memory: Optional[str] = None,
        executor_cores: Optional[int] = None,
        number_executors: Optional[int] = None,
        environment_asset_id: Optional[str] = None,
        environment_variables: Optional[Dict[str, str]] = None,
        inline_environment_definition_string: Optional[str] = None,
        conf: Optional[Dict[str, str]] = None,
        compute: Optional[str] = None,
        resources: Optional["ResourcesSetting"] = None,
        identity: Optional["IdentitySetting"] = None,
        **kwargs
    ):
        """
        :keyword entry:
        :paramtype entry:
~flow.models.EntrySetting
        :keyword files:
        :paramtype files: list[str]
        :keyword archives:
        :paramtype archives: list[str]
        :keyword jars:
        :paramtype jars: list[str]
        :keyword py_files:
        :paramtype py_files: list[str]
        :keyword driver_memory:
        :paramtype driver_memory: str
        :keyword driver_cores:
        :paramtype driver_cores: int
        :keyword executor_memory:
        :paramtype executor_memory: str
        :keyword executor_cores:
        :paramtype executor_cores: int
        :keyword number_executors:
        :paramtype number_executors: int
        :keyword environment_asset_id:
        :paramtype environment_asset_id: str
        :keyword environment_variables: Dictionary of :code:`<string>`.
        :paramtype environment_variables: dict[str, str]
        :keyword inline_environment_definition_string:
        :paramtype inline_environment_definition_string: str
        :keyword conf: Dictionary of :code:`<string>`.
        :paramtype conf: dict[str, str]
        :keyword compute:
        :paramtype compute: str
        :keyword resources:
        :paramtype resources: ~flow.models.ResourcesSetting
        :keyword identity:
        :paramtype identity: ~flow.models.IdentitySetting
        """
        super(AmlSparkCloudSetting, self).__init__(**kwargs)
        # Tail of AmlSparkCloudSetting.__init__: copy keyword arguments onto the instance.
        self.entry = entry
        self.files = files
        self.archives = archives
        self.jars = jars
        self.py_files = py_files
        self.driver_memory = driver_memory
        self.driver_cores = driver_cores
        self.executor_memory = executor_memory
        self.executor_cores = executor_cores
        self.number_executors = number_executors
        self.environment_asset_id = environment_asset_id
        self.environment_variables = environment_variables
        self.inline_environment_definition_string = inline_environment_definition_string
        self.conf = conf
        self.compute = compute
        self.resources = resources
        self.identity = identity


# AutoRest-generated DTO referencing an Aether Pipeline module; body continues below.
class APCloudConfiguration(msrest.serialization.Model):
    """APCloudConfiguration.
:ivar referenced_ap_module_guid:
    :vartype referenced_ap_module_guid: str
    :ivar user_alias:
    :vartype user_alias: str
    :ivar aether_module_type:
    :vartype aether_module_type: str
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'referenced_ap_module_guid': {'key': 'referencedAPModuleGuid', 'type': 'str'},
        'user_alias': {'key': 'userAlias', 'type': 'str'},
        'aether_module_type': {'key': 'aetherModuleType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        referenced_ap_module_guid: Optional[str] = None,
        user_alias: Optional[str] = None,
        aether_module_type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword referenced_ap_module_guid:
        :paramtype referenced_ap_module_guid: str
        :keyword user_alias:
        :paramtype user_alias: str
        :keyword aether_module_type:
        :paramtype aether_module_type: str
        """
        super(APCloudConfiguration, self).__init__(**kwargs)
        self.referenced_ap_module_guid = referenced_ap_module_guid
        self.user_alias = user_alias
        self.aether_module_type = aether_module_type


# AutoRest-generated DTO pairing a tool API name with its parameter settings;
# __init__ docstring and assignments continue below.
class ApiAndParameters(msrest.serialization.Model):
    """ApiAndParameters.

    :ivar api:
    :vartype api: str
    :ivar parameters: This is a dictionary.
    :vartype parameters: dict[str, ~flow.models.FlowToolSettingParameter]
    :ivar default_prompt:
    :vartype default_prompt: str
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    # Note: 'default_prompt' is sent snake_case on the wire, unlike most fields.
    _attribute_map = {
        'api': {'key': 'api', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{FlowToolSettingParameter}'},
        'default_prompt': {'key': 'default_prompt', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        api: Optional[str] = None,
        parameters: Optional[Dict[str, "FlowToolSettingParameter"]] = None,
        default_prompt: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword api:
        :paramtype api: str
        :keyword parameters: This is a dictionary.
:paramtype parameters: dict[str, ~flow.models.FlowToolSettingParameter]
        :keyword default_prompt:
        :paramtype default_prompt: str
        """
        super(ApiAndParameters, self).__init__(**kwargs)
        # Tail of ApiAndParameters.__init__: copy keyword arguments onto the instance.
        self.api = api
        self.parameters = parameters
        self.default_prompt = default_prompt


# AutoRest-generated DTO describing an endpoint exposed by a compute application.
class ApplicationEndpointConfiguration(msrest.serialization.Model):
    """ApplicationEndpointConfiguration.

    :ivar type: Possible values include: "Jupyter", "JupyterLab", "SSH", "TensorBoard", "VSCode",
     "Theia", "Grafana", "Custom", "RayDashboard".
    :vartype type: str or ~flow.models.ApplicationEndpointType
    :ivar port:
    :vartype port: int
    :ivar properties: Dictionary of :code:`<string>`.
    :vartype properties: dict[str, str]
    :ivar nodes:
    :vartype nodes: ~flow.models.Nodes
    """

    # Python attribute name -> REST wire key/type, consumed by msrest.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'port': {'key': 'port', 'type': 'int'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'nodes': {'key': 'nodes', 'type': 'Nodes'},
    }

    def __init__(
        self,
        *,
        type: Optional[Union[str, "ApplicationEndpointType"]] = None,
        port: Optional[int] = None,
        properties: Optional[Dict[str, str]] = None,
        nodes: Optional["Nodes"] = None,
        **kwargs
    ):
        """
        :keyword type: Possible values include: "Jupyter", "JupyterLab", "SSH", "TensorBoard",
         "VSCode", "Theia", "Grafana", "Custom", "RayDashboard".
        :paramtype type: str or ~flow.models.ApplicationEndpointType
        :keyword port:
        :paramtype port: int
        :keyword properties: Dictionary of :code:`<string>`.
        :paramtype properties: dict[str, str]
        :keyword nodes:
        :paramtype nodes: ~flow.models.Nodes
        """
        super(ApplicationEndpointConfiguration, self).__init__(**kwargs)
        self.type = type
        self.port = port
        self.properties = properties
        self.nodes = nodes


# AutoRest-generated DTO for a (possibly nested) argument value; body continues below.
class ArgumentAssignment(msrest.serialization.Model):
    """ArgumentAssignment.

    :ivar value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
     "NestedList", "StringInterpolationList".
    :vartype value_type: str or ~flow.models.ArgumentValueType
    :ivar value:
    :vartype value: str
    :ivar nested_argument_list:
    :vartype nested_argument_list: list[~flow.models.ArgumentAssignment]
    :ivar string_interpolation_argument_list:
    :vartype string_interpolation_argument_list: list[~flow.models.ArgumentAssignment]
    """

    _attribute_map = {
        'value_type': {'key': 'valueType', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
        # NOTE(review): self-referential element type — an assignment may recursively
        # contain further ArgumentAssignment lists (used with the "NestedList" /
        # "StringInterpolationList" value types listed in the docstring above).
        'nested_argument_list': {'key': 'nestedArgumentList', 'type': '[ArgumentAssignment]'},
        'string_interpolation_argument_list': {'key': 'stringInterpolationArgumentList', 'type': '[ArgumentAssignment]'},
    }

    def __init__(
        self,
        *,
        value_type: Optional[Union[str, "ArgumentValueType"]] = None,
        value: Optional[str] = None,
        nested_argument_list: Optional[List["ArgumentAssignment"]] = None,
        string_interpolation_argument_list: Optional[List["ArgumentAssignment"]] = None,
        **kwargs
    ):
        """
        :keyword value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
         "NestedList", "StringInterpolationList".
        :paramtype value_type: str or ~flow.models.ArgumentValueType
        :keyword value:
        :paramtype value: str
        :keyword nested_argument_list:
        :paramtype nested_argument_list: list[~flow.models.ArgumentAssignment]
        :keyword string_interpolation_argument_list:
        :paramtype string_interpolation_argument_list: list[~flow.models.ArgumentAssignment]
        """
        super(ArgumentAssignment, self).__init__(**kwargs)
        self.value_type = value_type
        self.value = value
        self.nested_argument_list = nested_argument_list
        self.string_interpolation_argument_list = string_interpolation_argument_list


class Asset(msrest.serialization.Model):
    """Asset.
:ivar asset_id: :vartype asset_id: str :ivar type: :vartype type: str """ _attribute_map = { 'asset_id': {'key': 'assetId', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__( self, *, asset_id: Optional[str] = None, type: Optional[str] = None, **kwargs ): """ :keyword asset_id: :paramtype asset_id: str :keyword type: :paramtype type: str """ super(Asset, self).__init__(**kwargs) self.asset_id = asset_id self.type = type class AssetDefinition(msrest.serialization.Model): """AssetDefinition. :ivar path: :vartype path: str :ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel", "OpenAIModel". :vartype type: str or ~flow.models.AEVAAssetType :ivar asset_id: :vartype asset_id: str :ivar serialized_asset_id: :vartype serialized_asset_id: str """ _attribute_map = { 'path': {'key': 'path', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'asset_id': {'key': 'assetId', 'type': 'str'}, 'serialized_asset_id': {'key': 'serializedAssetId', 'type': 'str'}, } def __init__( self, *, path: Optional[str] = None, type: Optional[Union[str, "AEVAAssetType"]] = None, asset_id: Optional[str] = None, serialized_asset_id: Optional[str] = None, **kwargs ): """ :keyword path: :paramtype path: str :keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel", "OpenAIModel". :paramtype type: str or ~flow.models.AEVAAssetType :keyword asset_id: :paramtype asset_id: str :keyword serialized_asset_id: :paramtype serialized_asset_id: str """ super(AssetDefinition, self).__init__(**kwargs) self.path = path self.type = type self.asset_id = asset_id self.serialized_asset_id = serialized_asset_id class AssetNameAndVersionIdentifier(msrest.serialization.Model): """AssetNameAndVersionIdentifier. 
:ivar asset_name: :vartype asset_name: str :ivar version: :vartype version: str :ivar feed_name: :vartype feed_name: str """ _attribute_map = { 'asset_name': {'key': 'assetName', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, 'feed_name': {'key': 'feedName', 'type': 'str'}, } def __init__( self, *, asset_name: Optional[str] = None, version: Optional[str] = None, feed_name: Optional[str] = None, **kwargs ): """ :keyword asset_name: :paramtype asset_name: str :keyword version: :paramtype version: str :keyword feed_name: :paramtype feed_name: str """ super(AssetNameAndVersionIdentifier, self).__init__(**kwargs) self.asset_name = asset_name self.version = version self.feed_name = feed_name class AssetOutputSettings(msrest.serialization.Model): """AssetOutputSettings. :ivar path: :vartype path: str :ivar path_parameter_assignment: :vartype path_parameter_assignment: ~flow.models.ParameterAssignment :ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel", "OpenAIModel". :vartype type: str or ~flow.models.AEVAAssetType :ivar options: This is a dictionary. :vartype options: dict[str, str] :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". 
    :vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    """

    _attribute_map = {
        'path': {'key': 'path', 'type': 'str'},
        # NOTE(review): wire key is PascalCase ('PathParameterAssignment'), unlike the
        # camelCase keys around it; this mirrors the service swagger — do not normalize.
        'path_parameter_assignment': {'key': 'PathParameterAssignment', 'type': 'ParameterAssignment'},
        'type': {'key': 'type', 'type': 'str'},
        'options': {'key': 'options', 'type': '{str}'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        path: Optional[str] = None,
        path_parameter_assignment: Optional["ParameterAssignment"] = None,
        type: Optional[Union[str, "AEVAAssetType"]] = None,
        options: Optional[Dict[str, str]] = None,
        data_store_mode: Optional[Union[str, "AEVADataStoreMode"]] = None,
        name: Optional[str] = None,
        version: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword path:
        :paramtype path: str
        :keyword path_parameter_assignment:
        :paramtype path_parameter_assignment: ~flow.models.ParameterAssignment
        :keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
         "MLFlowModel", "TritonModel", "OpenAIModel".
        :paramtype type: str or ~flow.models.AEVAAssetType
        :keyword options: This is a dictionary.
        :paramtype options: dict[str, str]
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        """
        super(AssetOutputSettings, self).__init__(**kwargs)
        self.path = path
        self.path_parameter_assignment = path_parameter_assignment
        self.type = type
        self.options = options
        self.data_store_mode = data_store_mode
        self.name = name
        self.version = version


class AssetOutputSettingsParameter(msrest.serialization.Model):
    """AssetOutputSettingsParameter.
:ivar name: :vartype name: str :ivar documentation: :vartype documentation: str :ivar default_value: :vartype default_value: ~flow.models.AssetOutputSettings """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'documentation': {'key': 'documentation', 'type': 'str'}, 'default_value': {'key': 'defaultValue', 'type': 'AssetOutputSettings'}, } def __init__( self, *, name: Optional[str] = None, documentation: Optional[str] = None, default_value: Optional["AssetOutputSettings"] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword documentation: :paramtype documentation: str :keyword default_value: :paramtype default_value: ~flow.models.AssetOutputSettings """ super(AssetOutputSettingsParameter, self).__init__(**kwargs) self.name = name self.documentation = documentation self.default_value = default_value class AssetPublishResult(msrest.serialization.Model): """AssetPublishResult. :ivar feed_name: :vartype feed_name: str :ivar asset_name: :vartype asset_name: str :ivar asset_version: :vartype asset_version: str :ivar step_name: :vartype step_name: str :ivar status: :vartype status: str :ivar error_message: :vartype error_message: str :ivar created_time: :vartype created_time: ~datetime.datetime :ivar last_updated_time: :vartype last_updated_time: ~datetime.datetime :ivar regional_publish_results: Dictionary of :code:`<AssetPublishSingleRegionResult>`. 
:vartype regional_publish_results: dict[str, ~flow.models.AssetPublishSingleRegionResult] """ _attribute_map = { 'feed_name': {'key': 'feedName', 'type': 'str'}, 'asset_name': {'key': 'assetName', 'type': 'str'}, 'asset_version': {'key': 'assetVersion', 'type': 'str'}, 'step_name': {'key': 'stepName', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'error_message': {'key': 'errorMessage', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'last_updated_time': {'key': 'lastUpdatedTime', 'type': 'iso-8601'}, 'regional_publish_results': {'key': 'regionalPublishResults', 'type': '{AssetPublishSingleRegionResult}'}, } def __init__( self, *, feed_name: Optional[str] = None, asset_name: Optional[str] = None, asset_version: Optional[str] = None, step_name: Optional[str] = None, status: Optional[str] = None, error_message: Optional[str] = None, created_time: Optional[datetime.datetime] = None, last_updated_time: Optional[datetime.datetime] = None, regional_publish_results: Optional[Dict[str, "AssetPublishSingleRegionResult"]] = None, **kwargs ): """ :keyword feed_name: :paramtype feed_name: str :keyword asset_name: :paramtype asset_name: str :keyword asset_version: :paramtype asset_version: str :keyword step_name: :paramtype step_name: str :keyword status: :paramtype status: str :keyword error_message: :paramtype error_message: str :keyword created_time: :paramtype created_time: ~datetime.datetime :keyword last_updated_time: :paramtype last_updated_time: ~datetime.datetime :keyword regional_publish_results: Dictionary of :code:`<AssetPublishSingleRegionResult>`. 
:paramtype regional_publish_results: dict[str, ~flow.models.AssetPublishSingleRegionResult] """ super(AssetPublishResult, self).__init__(**kwargs) self.feed_name = feed_name self.asset_name = asset_name self.asset_version = asset_version self.step_name = step_name self.status = status self.error_message = error_message self.created_time = created_time self.last_updated_time = last_updated_time self.regional_publish_results = regional_publish_results class AssetPublishSingleRegionResult(msrest.serialization.Model): """AssetPublishSingleRegionResult. :ivar step_name: :vartype step_name: str :ivar status: :vartype status: str :ivar error_message: :vartype error_message: str :ivar last_updated_time: :vartype last_updated_time: ~datetime.datetime :ivar total_steps: :vartype total_steps: int :ivar finished_steps: :vartype finished_steps: int :ivar remaining_steps: :vartype remaining_steps: int """ _attribute_map = { 'step_name': {'key': 'stepName', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'error_message': {'key': 'errorMessage', 'type': 'str'}, 'last_updated_time': {'key': 'lastUpdatedTime', 'type': 'iso-8601'}, 'total_steps': {'key': 'totalSteps', 'type': 'int'}, 'finished_steps': {'key': 'finishedSteps', 'type': 'int'}, 'remaining_steps': {'key': 'remainingSteps', 'type': 'int'}, } def __init__( self, *, step_name: Optional[str] = None, status: Optional[str] = None, error_message: Optional[str] = None, last_updated_time: Optional[datetime.datetime] = None, total_steps: Optional[int] = None, finished_steps: Optional[int] = None, remaining_steps: Optional[int] = None, **kwargs ): """ :keyword step_name: :paramtype step_name: str :keyword status: :paramtype status: str :keyword error_message: :paramtype error_message: str :keyword last_updated_time: :paramtype last_updated_time: ~datetime.datetime :keyword total_steps: :paramtype total_steps: int :keyword finished_steps: :paramtype finished_steps: int :keyword remaining_steps: :paramtype 
remaining_steps: int """ super(AssetPublishSingleRegionResult, self).__init__(**kwargs) self.step_name = step_name self.status = status self.error_message = error_message self.last_updated_time = last_updated_time self.total_steps = total_steps self.finished_steps = finished_steps self.remaining_steps = remaining_steps class AssetTypeMetaInfo(msrest.serialization.Model): """AssetTypeMetaInfo. :ivar consumption_mode: Possible values include: "Reference", "Copy", "CopyAndAutoUpgrade". :vartype consumption_mode: str or ~flow.models.ConsumeMode """ _attribute_map = { 'consumption_mode': {'key': 'consumptionMode', 'type': 'str'}, } def __init__( self, *, consumption_mode: Optional[Union[str, "ConsumeMode"]] = None, **kwargs ): """ :keyword consumption_mode: Possible values include: "Reference", "Copy", "CopyAndAutoUpgrade". :paramtype consumption_mode: str or ~flow.models.ConsumeMode """ super(AssetTypeMetaInfo, self).__init__(**kwargs) self.consumption_mode = consumption_mode class AssetVersionPublishRequest(msrest.serialization.Model): """AssetVersionPublishRequest. :ivar asset_type: Possible values include: "Component", "Model", "Environment", "Dataset", "DataStore", "SampleGraph", "FlowTool", "FlowToolSetting", "FlowConnection", "FlowSample", "FlowRuntimeSpec". :vartype asset_type: str or ~flow.models.AssetType :ivar asset_source_type: Possible values include: "Unknown", "Local", "GithubFile", "GithubFolder", "DevopsArtifactsZip". 
    :vartype asset_source_type: str or ~flow.models.AssetSourceType
    :ivar yaml_file:
    :vartype yaml_file: str
    :ivar source_zip_url:
    :vartype source_zip_url: str
    :ivar source_zip_file:
    :vartype source_zip_file: IO
    :ivar feed_name:
    :vartype feed_name: str
    :ivar set_as_default_version:
    :vartype set_as_default_version: bool
    :ivar referenced_assets:
    :vartype referenced_assets: list[~flow.models.AssetNameAndVersionIdentifier]
    :ivar flow_file:
    :vartype flow_file: str
    :ivar version:
    :vartype version: str
    """

    _attribute_map = {
        'asset_type': {'key': 'assetType', 'type': 'str'},
        'asset_source_type': {'key': 'assetSourceType', 'type': 'str'},
        'yaml_file': {'key': 'yamlFile', 'type': 'str'},
        'source_zip_url': {'key': 'sourceZipUrl', 'type': 'str'},
        # NOTE(review): 'IO' marks a raw stream field — presumably this request is sent
        # as multipart/form-data when source_zip_file is set; confirm against the
        # operation that consumes this model.
        'source_zip_file': {'key': 'sourceZipFile', 'type': 'IO'},
        'feed_name': {'key': 'feedName', 'type': 'str'},
        'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
        'referenced_assets': {'key': 'referencedAssets', 'type': '[AssetNameAndVersionIdentifier]'},
        'flow_file': {'key': 'flowFile', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        asset_type: Optional[Union[str, "AssetType"]] = None,
        asset_source_type: Optional[Union[str, "AssetSourceType"]] = None,
        yaml_file: Optional[str] = None,
        source_zip_url: Optional[str] = None,
        source_zip_file: Optional[IO] = None,
        feed_name: Optional[str] = None,
        set_as_default_version: Optional[bool] = None,
        referenced_assets: Optional[List["AssetNameAndVersionIdentifier"]] = None,
        flow_file: Optional[str] = None,
        version: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword asset_type: Possible values include: "Component", "Model", "Environment", "Dataset",
         "DataStore", "SampleGraph", "FlowTool", "FlowToolSetting", "FlowConnection", "FlowSample",
         "FlowRuntimeSpec".
        :paramtype asset_type: str or ~flow.models.AssetType
        :keyword asset_source_type: Possible values include: "Unknown", "Local", "GithubFile",
         "GithubFolder", "DevopsArtifactsZip".
:paramtype asset_source_type: str or ~flow.models.AssetSourceType :keyword yaml_file: :paramtype yaml_file: str :keyword source_zip_url: :paramtype source_zip_url: str :keyword source_zip_file: :paramtype source_zip_file: IO :keyword feed_name: :paramtype feed_name: str :keyword set_as_default_version: :paramtype set_as_default_version: bool :keyword referenced_assets: :paramtype referenced_assets: list[~flow.models.AssetNameAndVersionIdentifier] :keyword flow_file: :paramtype flow_file: str :keyword version: :paramtype version: str """ super(AssetVersionPublishRequest, self).__init__(**kwargs) self.asset_type = asset_type self.asset_source_type = asset_source_type self.yaml_file = yaml_file self.source_zip_url = source_zip_url self.source_zip_file = source_zip_file self.feed_name = feed_name self.set_as_default_version = set_as_default_version self.referenced_assets = referenced_assets self.flow_file = flow_file self.version = version class AssignedUser(msrest.serialization.Model): """AssignedUser. :ivar object_id: :vartype object_id: str :ivar tenant_id: :vartype tenant_id: str """ _attribute_map = { 'object_id': {'key': 'objectId', 'type': 'str'}, 'tenant_id': {'key': 'tenantId', 'type': 'str'}, } def __init__( self, *, object_id: Optional[str] = None, tenant_id: Optional[str] = None, **kwargs ): """ :keyword object_id: :paramtype object_id: str :keyword tenant_id: :paramtype tenant_id: str """ super(AssignedUser, self).__init__(**kwargs) self.object_id = object_id self.tenant_id = tenant_id class AuthKeys(msrest.serialization.Model): """AuthKeys. 
:ivar primary_key: :vartype primary_key: str :ivar secondary_key: :vartype secondary_key: str """ _attribute_map = { 'primary_key': {'key': 'primaryKey', 'type': 'str'}, 'secondary_key': {'key': 'secondaryKey', 'type': 'str'}, } def __init__( self, *, primary_key: Optional[str] = None, secondary_key: Optional[str] = None, **kwargs ): """ :keyword primary_key: :paramtype primary_key: str :keyword secondary_key: :paramtype secondary_key: str """ super(AuthKeys, self).__init__(**kwargs) self.primary_key = primary_key self.secondary_key = secondary_key class AutoClusterComputeSpecification(msrest.serialization.Model): """AutoClusterComputeSpecification. :ivar instance_size: :vartype instance_size: str :ivar instance_priority: :vartype instance_priority: str :ivar os_type: :vartype os_type: str :ivar location: :vartype location: str :ivar runtime_version: :vartype runtime_version: str :ivar quota_enforcement_resource_id: :vartype quota_enforcement_resource_id: str :ivar model_compute_specification_id: :vartype model_compute_specification_id: str """ _attribute_map = { 'instance_size': {'key': 'instanceSize', 'type': 'str'}, 'instance_priority': {'key': 'instancePriority', 'type': 'str'}, 'os_type': {'key': 'osType', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'runtime_version': {'key': 'runtimeVersion', 'type': 'str'}, 'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'}, 'model_compute_specification_id': {'key': 'modelComputeSpecificationId', 'type': 'str'}, } def __init__( self, *, instance_size: Optional[str] = None, instance_priority: Optional[str] = None, os_type: Optional[str] = None, location: Optional[str] = None, runtime_version: Optional[str] = None, quota_enforcement_resource_id: Optional[str] = None, model_compute_specification_id: Optional[str] = None, **kwargs ): """ :keyword instance_size: :paramtype instance_size: str :keyword instance_priority: :paramtype instance_priority: str :keyword os_type: 
:paramtype os_type: str :keyword location: :paramtype location: str :keyword runtime_version: :paramtype runtime_version: str :keyword quota_enforcement_resource_id: :paramtype quota_enforcement_resource_id: str :keyword model_compute_specification_id: :paramtype model_compute_specification_id: str """ super(AutoClusterComputeSpecification, self).__init__(**kwargs) self.instance_size = instance_size self.instance_priority = instance_priority self.os_type = os_type self.location = location self.runtime_version = runtime_version self.quota_enforcement_resource_id = quota_enforcement_resource_id self.model_compute_specification_id = model_compute_specification_id class AutoDeleteSetting(msrest.serialization.Model): """AutoDeleteSetting. :ivar condition: Possible values include: "CreatedGreaterThan", "LastAccessedGreaterThan". :vartype condition: str or ~flow.models.AutoDeleteCondition :ivar value: :vartype value: str """ _attribute_map = { 'condition': {'key': 'condition', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, } def __init__( self, *, condition: Optional[Union[str, "AutoDeleteCondition"]] = None, value: Optional[str] = None, **kwargs ): """ :keyword condition: Possible values include: "CreatedGreaterThan", "LastAccessedGreaterThan". :paramtype condition: str or ~flow.models.AutoDeleteCondition :keyword value: :paramtype value: str """ super(AutoDeleteSetting, self).__init__(**kwargs) self.condition = condition self.value = value class AutoFeaturizeConfiguration(msrest.serialization.Model): """AutoFeaturizeConfiguration. 
:ivar featurization_config: :vartype featurization_config: ~flow.models.FeaturizationSettings """ _attribute_map = { 'featurization_config': {'key': 'featurizationConfig', 'type': 'FeaturizationSettings'}, } def __init__( self, *, featurization_config: Optional["FeaturizationSettings"] = None, **kwargs ): """ :keyword featurization_config: :paramtype featurization_config: ~flow.models.FeaturizationSettings """ super(AutoFeaturizeConfiguration, self).__init__(**kwargs) self.featurization_config = featurization_config class AutologgerSettings(msrest.serialization.Model): """AutologgerSettings. :ivar ml_flow_autologger: Possible values include: "Enabled", "Disabled". :vartype ml_flow_autologger: str or ~flow.models.MLFlowAutologgerState """ _attribute_map = { 'ml_flow_autologger': {'key': 'mlFlowAutologger', 'type': 'str'}, } def __init__( self, *, ml_flow_autologger: Optional[Union[str, "MLFlowAutologgerState"]] = None, **kwargs ): """ :keyword ml_flow_autologger: Possible values include: "Enabled", "Disabled". :paramtype ml_flow_autologger: str or ~flow.models.MLFlowAutologgerState """ super(AutologgerSettings, self).__init__(**kwargs) self.ml_flow_autologger = ml_flow_autologger class AutoMLComponentConfiguration(msrest.serialization.Model): """AutoMLComponentConfiguration. 
:ivar auto_train_config: :vartype auto_train_config: ~flow.models.AutoTrainConfiguration :ivar auto_featurize_config: :vartype auto_featurize_config: ~flow.models.AutoFeaturizeConfiguration """ _attribute_map = { 'auto_train_config': {'key': 'autoTrainConfig', 'type': 'AutoTrainConfiguration'}, 'auto_featurize_config': {'key': 'autoFeaturizeConfig', 'type': 'AutoFeaturizeConfiguration'}, } def __init__( self, *, auto_train_config: Optional["AutoTrainConfiguration"] = None, auto_featurize_config: Optional["AutoFeaturizeConfiguration"] = None, **kwargs ): """ :keyword auto_train_config: :paramtype auto_train_config: ~flow.models.AutoTrainConfiguration :keyword auto_featurize_config: :paramtype auto_featurize_config: ~flow.models.AutoFeaturizeConfiguration """ super(AutoMLComponentConfiguration, self).__init__(**kwargs) self.auto_train_config = auto_train_config self.auto_featurize_config = auto_featurize_config class AutoScaler(msrest.serialization.Model): """AutoScaler. :ivar autoscale_enabled: :vartype autoscale_enabled: bool :ivar min_replicas: :vartype min_replicas: int :ivar max_replicas: :vartype max_replicas: int :ivar target_utilization: :vartype target_utilization: int :ivar refresh_period_in_seconds: :vartype refresh_period_in_seconds: int """ _attribute_map = { 'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'}, 'min_replicas': {'key': 'minReplicas', 'type': 'int'}, 'max_replicas': {'key': 'maxReplicas', 'type': 'int'}, 'target_utilization': {'key': 'targetUtilization', 'type': 'int'}, 'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'}, } def __init__( self, *, autoscale_enabled: Optional[bool] = None, min_replicas: Optional[int] = None, max_replicas: Optional[int] = None, target_utilization: Optional[int] = None, refresh_period_in_seconds: Optional[int] = None, **kwargs ): """ :keyword autoscale_enabled: :paramtype autoscale_enabled: bool :keyword min_replicas: :paramtype min_replicas: int :keyword max_replicas: 
:paramtype max_replicas: int :keyword target_utilization: :paramtype target_utilization: int :keyword refresh_period_in_seconds: :paramtype refresh_period_in_seconds: int """ super(AutoScaler, self).__init__(**kwargs) self.autoscale_enabled = autoscale_enabled self.min_replicas = min_replicas self.max_replicas = max_replicas self.target_utilization = target_utilization self.refresh_period_in_seconds = refresh_period_in_seconds class AutoTrainConfiguration(msrest.serialization.Model): """AutoTrainConfiguration. :ivar general_settings: :vartype general_settings: ~flow.models.GeneralSettings :ivar limit_settings: :vartype limit_settings: ~flow.models.LimitSettings :ivar data_settings: :vartype data_settings: ~flow.models.DataSettings :ivar forecasting_settings: :vartype forecasting_settings: ~flow.models.ForecastingSettings :ivar training_settings: :vartype training_settings: ~flow.models.TrainingSettings :ivar sweep_settings: :vartype sweep_settings: ~flow.models.SweepSettings :ivar image_model_settings: Dictionary of :code:`<any>`. :vartype image_model_settings: dict[str, any] :ivar properties: Dictionary of :code:`<string>`. :vartype properties: dict[str, str] :ivar compute_configuration: :vartype compute_configuration: ~flow.models.AEVAComputeConfiguration :ivar resource_configurtion: :vartype resource_configurtion: ~flow.models.AEVAResourceConfiguration :ivar environment_id: :vartype environment_id: str :ivar environment_variables: Dictionary of :code:`<string>`. 
    :vartype environment_variables: dict[str, str]
    """

    _attribute_map = {
        'general_settings': {'key': 'generalSettings', 'type': 'GeneralSettings'},
        'limit_settings': {'key': 'limitSettings', 'type': 'LimitSettings'},
        'data_settings': {'key': 'dataSettings', 'type': 'DataSettings'},
        'forecasting_settings': {'key': 'forecastingSettings', 'type': 'ForecastingSettings'},
        'training_settings': {'key': 'trainingSettings', 'type': 'TrainingSettings'},
        'sweep_settings': {'key': 'sweepSettings', 'type': 'SweepSettings'},
        'image_model_settings': {'key': 'imageModelSettings', 'type': '{object}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'compute_configuration': {'key': 'computeConfiguration', 'type': 'AEVAComputeConfiguration'},
        # NOTE(review): 'configurtion' is misspelled in both the attribute name and the
        # wire key; it mirrors the generated service contract and callers already use
        # this name, so it must stay as generated.
        'resource_configurtion': {'key': 'resourceConfigurtion', 'type': 'AEVAResourceConfiguration'},
        'environment_id': {'key': 'environmentId', 'type': 'str'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        general_settings: Optional["GeneralSettings"] = None,
        limit_settings: Optional["LimitSettings"] = None,
        data_settings: Optional["DataSettings"] = None,
        forecasting_settings: Optional["ForecastingSettings"] = None,
        training_settings: Optional["TrainingSettings"] = None,
        sweep_settings: Optional["SweepSettings"] = None,
        image_model_settings: Optional[Dict[str, Any]] = None,
        properties: Optional[Dict[str, str]] = None,
        compute_configuration: Optional["AEVAComputeConfiguration"] = None,
        resource_configurtion: Optional["AEVAResourceConfiguration"] = None,
        environment_id: Optional[str] = None,
        environment_variables: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword general_settings:
        :paramtype general_settings: ~flow.models.GeneralSettings
        :keyword limit_settings:
        :paramtype limit_settings: ~flow.models.LimitSettings
        :keyword data_settings:
        :paramtype data_settings: ~flow.models.DataSettings
        :keyword forecasting_settings:
        :paramtype forecasting_settings: ~flow.models.ForecastingSettings
:keyword training_settings: :paramtype training_settings: ~flow.models.TrainingSettings :keyword sweep_settings: :paramtype sweep_settings: ~flow.models.SweepSettings :keyword image_model_settings: Dictionary of :code:`<any>`. :paramtype image_model_settings: dict[str, any] :keyword properties: Dictionary of :code:`<string>`. :paramtype properties: dict[str, str] :keyword compute_configuration: :paramtype compute_configuration: ~flow.models.AEVAComputeConfiguration :keyword resource_configurtion: :paramtype resource_configurtion: ~flow.models.AEVAResourceConfiguration :keyword environment_id: :paramtype environment_id: str :keyword environment_variables: Dictionary of :code:`<string>`. :paramtype environment_variables: dict[str, str] """ super(AutoTrainConfiguration, self).__init__(**kwargs) self.general_settings = general_settings self.limit_settings = limit_settings self.data_settings = data_settings self.forecasting_settings = forecasting_settings self.training_settings = training_settings self.sweep_settings = sweep_settings self.image_model_settings = image_model_settings self.properties = properties self.compute_configuration = compute_configuration self.resource_configurtion = resource_configurtion self.environment_id = environment_id self.environment_variables = environment_variables class AvailabilityResponse(msrest.serialization.Model): """AvailabilityResponse. :ivar is_available: :vartype is_available: bool :ivar error: The error response. :vartype error: ~flow.models.ErrorResponse """ _attribute_map = { 'is_available': {'key': 'isAvailable', 'type': 'bool'}, 'error': {'key': 'error', 'type': 'ErrorResponse'}, } def __init__( self, *, is_available: Optional[bool] = None, error: Optional["ErrorResponse"] = None, **kwargs ): """ :keyword is_available: :paramtype is_available: bool :keyword error: The error response. 
        :paramtype error: ~flow.models.ErrorResponse
        """
        super(AvailabilityResponse, self).__init__(**kwargs)
        self.is_available = is_available
        self.error = error


class AzureBlobReference(msrest.serialization.Model):
    """AzureBlobReference.

    :ivar container:
    :vartype container: str
    :ivar sas_token: SAS token for the blob — credential material; avoid logging or
     persisting it.
    :vartype sas_token: str
    :ivar uri:
    :vartype uri: str
    :ivar account:
    :vartype account: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    """

    _attribute_map = {
        'container': {'key': 'container', 'type': 'str'},
        'sas_token': {'key': 'sasToken', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
        'account': {'key': 'account', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        container: Optional[str] = None,
        sas_token: Optional[str] = None,
        uri: Optional[str] = None,
        account: Optional[str] = None,
        relative_path: Optional[str] = None,
        aml_data_store_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword container:
        :paramtype container: str
        :keyword sas_token: SAS token for the blob — credential material; avoid logging or
         persisting it.
        :paramtype sas_token: str
        :keyword uri:
        :paramtype uri: str
        :keyword account:
        :paramtype account: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        """
        super(AzureBlobReference, self).__init__(**kwargs)
        self.container = container
        self.sas_token = sas_token
        self.uri = uri
        self.account = account
        self.relative_path = relative_path
        self.aml_data_store_name = aml_data_store_name


class AzureDatabaseReference(msrest.serialization.Model):
    """AzureDatabaseReference.
:ivar table_name: :vartype table_name: str :ivar sql_query: :vartype sql_query: str :ivar stored_procedure_name: :vartype stored_procedure_name: str :ivar stored_procedure_parameters: :vartype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter] :ivar aml_data_store_name: :vartype aml_data_store_name: str """ _attribute_map = { 'table_name': {'key': 'tableName', 'type': 'str'}, 'sql_query': {'key': 'sqlQuery', 'type': 'str'}, 'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[StoredProcedureParameter]'}, 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, } def __init__( self, *, table_name: Optional[str] = None, sql_query: Optional[str] = None, stored_procedure_name: Optional[str] = None, stored_procedure_parameters: Optional[List["StoredProcedureParameter"]] = None, aml_data_store_name: Optional[str] = None, **kwargs ): """ :keyword table_name: :paramtype table_name: str :keyword sql_query: :paramtype sql_query: str :keyword stored_procedure_name: :paramtype stored_procedure_name: str :keyword stored_procedure_parameters: :paramtype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter] :keyword aml_data_store_name: :paramtype aml_data_store_name: str """ super(AzureDatabaseReference, self).__init__(**kwargs) self.table_name = table_name self.sql_query = sql_query self.stored_procedure_name = stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters self.aml_data_store_name = aml_data_store_name class AzureDataLakeGen2Reference(msrest.serialization.Model): """AzureDataLakeGen2Reference. 
    :ivar file_system_name:
    :vartype file_system_name: str
    :ivar uri:
    :vartype uri: str
    :ivar account:
    :vartype account: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    """

    # Serialization map: python attribute -> (REST wire key, msrest type).
    _attribute_map = {
        'file_system_name': {'key': 'fileSystemName', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
        'account': {'key': 'account', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        file_system_name: Optional[str] = None,
        uri: Optional[str] = None,
        account: Optional[str] = None,
        relative_path: Optional[str] = None,
        aml_data_store_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword file_system_name:
        :paramtype file_system_name: str
        :keyword uri:
        :paramtype uri: str
        :keyword account:
        :paramtype account: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        """
        super(AzureDataLakeGen2Reference, self).__init__(**kwargs)
        self.file_system_name = file_system_name
        self.uri = uri
        self.account = account
        self.relative_path = relative_path
        self.aml_data_store_name = aml_data_store_name


class AzureDataLakeReference(msrest.serialization.Model):
    """AzureDataLakeReference.

    :ivar tenant:
    :vartype tenant: str
    :ivar subscription:
    :vartype subscription: str
    :ivar resource_group:
    :vartype resource_group: str
    :ivar uri:
    :vartype uri: str
    :ivar account:
    :vartype account: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    """

    # Gen1 reference additionally carries tenant/subscription/resource group,
    # unlike the Gen2/blob/file variants defined alongside it.
    _attribute_map = {
        'tenant': {'key': 'tenant', 'type': 'str'},
        'subscription': {'key': 'subscription', 'type': 'str'},
        'resource_group': {'key': 'resourceGroup', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
        'account': {'key': 'account', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        tenant: Optional[str] = None,
        subscription: Optional[str] = None,
        resource_group: Optional[str] = None,
        uri: Optional[str] = None,
        account: Optional[str] = None,
        relative_path: Optional[str] = None,
        aml_data_store_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword tenant:
        :paramtype tenant: str
        :keyword subscription:
        :paramtype subscription: str
        :keyword resource_group:
        :paramtype resource_group: str
        :keyword uri:
        :paramtype uri: str
        :keyword account:
        :paramtype account: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        """
        super(AzureDataLakeReference, self).__init__(**kwargs)
        self.tenant = tenant
        self.subscription = subscription
        self.resource_group = resource_group
        self.uri = uri
        self.account = account
        self.relative_path = relative_path
        self.aml_data_store_name = aml_data_store_name


class AzureFilesReference(msrest.serialization.Model):
    """AzureFilesReference.
    :ivar share:
    :vartype share: str
    :ivar uri:
    :vartype uri: str
    :ivar account:
    :vartype account: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    """

    # Serialization map: python attribute -> (REST wire key, msrest type).
    _attribute_map = {
        'share': {'key': 'share', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
        'account': {'key': 'account', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        share: Optional[str] = None,
        uri: Optional[str] = None,
        account: Optional[str] = None,
        relative_path: Optional[str] = None,
        aml_data_store_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword share:
        :paramtype share: str
        :keyword uri:
        :paramtype uri: str
        :keyword account:
        :paramtype account: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        """
        super(AzureFilesReference, self).__init__(**kwargs)
        self.share = share
        self.uri = uri
        self.account = account
        self.relative_path = relative_path
        self.aml_data_store_name = aml_data_store_name


class AzureMLModuleVersionDescriptor(msrest.serialization.Model):
    """AzureMLModuleVersionDescriptor.

    :ivar module_version_id:
    :vartype module_version_id: str
    :ivar version:
    :vartype version: str
    """

    _attribute_map = {
        'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        module_version_id: Optional[str] = None,
        version: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword module_version_id:
        :paramtype module_version_id: str
        :keyword version:
        :paramtype version: str
        """
        super(AzureMLModuleVersionDescriptor, self).__init__(**kwargs)
        self.module_version_id = module_version_id
        self.version = version


class AzureOpenAIDeploymentDto(msrest.serialization.Model):
    """AzureOpenAIDeploymentDto.

    :ivar name:
    :vartype name: str
    :ivar model_name:
    :vartype model_name: str
    :ivar capabilities:
    :vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'model_name': {'key': 'modelName', 'type': 'str'},
        'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        model_name: Optional[str] = None,
        capabilities: Optional["AzureOpenAIModelCapabilities"] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword model_name:
        :paramtype model_name: str
        :keyword capabilities:
        :paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
        """
        super(AzureOpenAIDeploymentDto, self).__init__(**kwargs)
        self.name = name
        self.model_name = model_name
        self.capabilities = capabilities


class AzureOpenAIModelCapabilities(msrest.serialization.Model):
    """AzureOpenAIModelCapabilities.

    :ivar completion:
    :vartype completion: bool
    :ivar chat_completion:
    :vartype chat_completion: bool
    :ivar embeddings:
    :vartype embeddings: bool
    """

    # NOTE: the wire key is literally 'chat_completion' (snake_case), unlike
    # the camelCase keys used by most other models in this file.
    _attribute_map = {
        'completion': {'key': 'completion', 'type': 'bool'},
        'chat_completion': {'key': 'chat_completion', 'type': 'bool'},
        'embeddings': {'key': 'embeddings', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        completion: Optional[bool] = None,
        chat_completion: Optional[bool] = None,
        embeddings: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword completion:
        :paramtype completion: bool
        :keyword chat_completion:
        :paramtype chat_completion: bool
        :keyword embeddings:
        :paramtype embeddings: bool
        """
        super(AzureOpenAIModelCapabilities, self).__init__(**kwargs)
        self.completion = completion
        self.chat_completion = chat_completion
        self.embeddings = embeddings


class BatchAiComputeInfo(msrest.serialization.Model):
    """BatchAiComputeInfo.
    :ivar batch_ai_subscription_id:
    :vartype batch_ai_subscription_id: str
    :ivar batch_ai_resource_group:
    :vartype batch_ai_resource_group: str
    :ivar batch_ai_workspace_name:
    :vartype batch_ai_workspace_name: str
    :ivar cluster_name:
    :vartype cluster_name: str
    :ivar native_shared_directory:
    :vartype native_shared_directory: str
    """

    # Serialization map: python attribute -> (REST wire key, msrest type).
    _attribute_map = {
        'batch_ai_subscription_id': {'key': 'batchAiSubscriptionId', 'type': 'str'},
        'batch_ai_resource_group': {'key': 'batchAiResourceGroup', 'type': 'str'},
        'batch_ai_workspace_name': {'key': 'batchAiWorkspaceName', 'type': 'str'},
        'cluster_name': {'key': 'clusterName', 'type': 'str'},
        'native_shared_directory': {'key': 'nativeSharedDirectory', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        batch_ai_subscription_id: Optional[str] = None,
        batch_ai_resource_group: Optional[str] = None,
        batch_ai_workspace_name: Optional[str] = None,
        cluster_name: Optional[str] = None,
        native_shared_directory: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword batch_ai_subscription_id:
        :paramtype batch_ai_subscription_id: str
        :keyword batch_ai_resource_group:
        :paramtype batch_ai_resource_group: str
        :keyword batch_ai_workspace_name:
        :paramtype batch_ai_workspace_name: str
        :keyword cluster_name:
        :paramtype cluster_name: str
        :keyword native_shared_directory:
        :paramtype native_shared_directory: str
        """
        super(BatchAiComputeInfo, self).__init__(**kwargs)
        self.batch_ai_subscription_id = batch_ai_subscription_id
        self.batch_ai_resource_group = batch_ai_resource_group
        self.batch_ai_workspace_name = batch_ai_workspace_name
        self.cluster_name = cluster_name
        self.native_shared_directory = native_shared_directory


class BatchDataInput(msrest.serialization.Model):
    """BatchDataInput.

    :ivar data_uri:
    :vartype data_uri: str
    :ivar type:
    :vartype type: str
    """

    _attribute_map = {
        'data_uri': {'key': 'dataUri', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        data_uri: Optional[str] = None,
        type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword data_uri:
        :paramtype data_uri: str
        :keyword type:
        :paramtype type: str
        """
        super(BatchDataInput, self).__init__(**kwargs)
        self.data_uri = data_uri
        self.type = type


class BatchExportComponentSpecResponse(msrest.serialization.Model):
    """BatchExportComponentSpecResponse.

    :ivar component_spec_meta_infos:
    :vartype component_spec_meta_infos: list[~flow.models.ComponentSpecMetaInfo]
    :ivar errors:
    :vartype errors: list[~flow.models.ErrorResponse]
    """

    # Batch response: successful spec metadata and per-item errors are
    # returned in parallel lists.
    _attribute_map = {
        'component_spec_meta_infos': {'key': 'componentSpecMetaInfos', 'type': '[ComponentSpecMetaInfo]'},
        'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
    }

    def __init__(
        self,
        *,
        component_spec_meta_infos: Optional[List["ComponentSpecMetaInfo"]] = None,
        errors: Optional[List["ErrorResponse"]] = None,
        **kwargs
    ):
        """
        :keyword component_spec_meta_infos:
        :paramtype component_spec_meta_infos: list[~flow.models.ComponentSpecMetaInfo]
        :keyword errors:
        :paramtype errors: list[~flow.models.ErrorResponse]
        """
        super(BatchExportComponentSpecResponse, self).__init__(**kwargs)
        self.component_spec_meta_infos = component_spec_meta_infos
        self.errors = errors


class BatchExportRawComponentResponse(msrest.serialization.Model):
    """BatchExportRawComponentResponse.
    :ivar raw_component_dtos:
    :vartype raw_component_dtos: list[~flow.models.RawComponentDto]
    :ivar errors:
    :vartype errors: list[~flow.models.ErrorResponse]
    """

    # Serialization map: python attribute -> (REST wire key, msrest type).
    _attribute_map = {
        'raw_component_dtos': {'key': 'rawComponentDtos', 'type': '[RawComponentDto]'},
        'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
    }

    def __init__(
        self,
        *,
        raw_component_dtos: Optional[List["RawComponentDto"]] = None,
        errors: Optional[List["ErrorResponse"]] = None,
        **kwargs
    ):
        """
        :keyword raw_component_dtos:
        :paramtype raw_component_dtos: list[~flow.models.RawComponentDto]
        :keyword errors:
        :paramtype errors: list[~flow.models.ErrorResponse]
        """
        super(BatchExportRawComponentResponse, self).__init__(**kwargs)
        self.raw_component_dtos = raw_component_dtos
        self.errors = errors


class BatchGetComponentHashesRequest(msrest.serialization.Model):
    """BatchGetComponentHashesRequest.

    :ivar module_hash_version: Possible values include: "IdentifierHash", "IdentifierHashV2".
    :vartype module_hash_version: str or ~flow.models.AetherModuleHashVersion
    :ivar module_entities: Dictionary of :code:`<AetherModuleEntity>`.
    :vartype module_entities: dict[str, ~flow.models.AetherModuleEntity]
    """

    # '{AetherModuleEntity}' = msrest dict-of-model type.
    _attribute_map = {
        'module_hash_version': {'key': 'moduleHashVersion', 'type': 'str'},
        'module_entities': {'key': 'moduleEntities', 'type': '{AetherModuleEntity}'},
    }

    def __init__(
        self,
        *,
        module_hash_version: Optional[Union[str, "AetherModuleHashVersion"]] = None,
        module_entities: Optional[Dict[str, "AetherModuleEntity"]] = None,
        **kwargs
    ):
        """
        :keyword module_hash_version: Possible values include: "IdentifierHash", "IdentifierHashV2".
        :paramtype module_hash_version: str or ~flow.models.AetherModuleHashVersion
        :keyword module_entities: Dictionary of :code:`<AetherModuleEntity>`.
        :paramtype module_entities: dict[str, ~flow.models.AetherModuleEntity]
        """
        super(BatchGetComponentHashesRequest, self).__init__(**kwargs)
        self.module_hash_version = module_hash_version
        self.module_entities = module_entities


class BatchGetComponentRequest(msrest.serialization.Model):
    """BatchGetComponentRequest.

    :ivar version_ids:
    :vartype version_ids: list[str]
    :ivar name_and_versions:
    :vartype name_and_versions: list[~flow.models.ComponentNameMetaInfo]
    """

    _attribute_map = {
        'version_ids': {'key': 'versionIds', 'type': '[str]'},
        'name_and_versions': {'key': 'nameAndVersions', 'type': '[ComponentNameMetaInfo]'},
    }

    def __init__(
        self,
        *,
        version_ids: Optional[List[str]] = None,
        name_and_versions: Optional[List["ComponentNameMetaInfo"]] = None,
        **kwargs
    ):
        """
        :keyword version_ids:
        :paramtype version_ids: list[str]
        :keyword name_and_versions:
        :paramtype name_and_versions: list[~flow.models.ComponentNameMetaInfo]
        """
        super(BatchGetComponentRequest, self).__init__(**kwargs)
        self.version_ids = version_ids
        self.name_and_versions = name_and_versions


class Binding(msrest.serialization.Model):
    """Binding.

    :ivar binding_type: The only acceptable values to pass in are None and "Basic". The default
     value is None.
    :vartype binding_type: str
    """

    _attribute_map = {
        'binding_type': {'key': 'bindingType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        binding_type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword binding_type: The only acceptable values to pass in are None and "Basic". The
         default value is None.
        :paramtype binding_type: str
        """
        super(Binding, self).__init__(**kwargs)
        self.binding_type = binding_type


class BulkTestDto(msrest.serialization.Model):
    """BulkTestDto.

    :ivar bulk_test_id:
    :vartype bulk_test_id: str
    :ivar display_name:
    :vartype display_name: str
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar runtime:
    :vartype runtime: str
    :ivar created_by:
    :vartype created_by: ~flow.models.SchemaContractsCreatedBy
    :ivar created_on:
    :vartype created_on: ~datetime.datetime
    :ivar evaluation_count:
    :vartype evaluation_count: int
    :ivar variant_count:
    :vartype variant_count: int
    :ivar flow_submit_run_settings:
    :vartype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
    :ivar inputs: This is a dictionary.
    :vartype inputs: dict[str, ~flow.models.FlowInputDefinition]
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, ~flow.models.FlowOutputDefinition]
    :ivar batch_inputs:
    :vartype batch_inputs: list[dict[str, any]]
    :ivar batch_data_input:
    :vartype batch_data_input: ~flow.models.BatchDataInput
    """

    # NOTE: 'batch_inputs' keeps a snake_case wire key, unlike the camelCase
    # keys of its siblings; 'created_on' is serialized as ISO-8601.
    _attribute_map = {
        'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'runtime': {'key': 'runtime', 'type': 'str'},
        'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'evaluation_count': {'key': 'evaluationCount', 'type': 'int'},
        'variant_count': {'key': 'variantCount', 'type': 'int'},
        'flow_submit_run_settings': {'key': 'flowSubmitRunSettings', 'type': 'FlowSubmitRunSettings'},
        'inputs': {'key': 'inputs', 'type': '{FlowInputDefinition}'},
        'outputs': {'key': 'outputs', 'type': '{FlowOutputDefinition}'},
        'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
        'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
    }

    def __init__(
        self,
        *,
        bulk_test_id: Optional[str] = None,
        display_name: Optional[str] = None,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        runtime: Optional[str] = None,
        created_by: Optional["SchemaContractsCreatedBy"] = None,
        created_on: Optional[datetime.datetime] = None,
        evaluation_count: Optional[int] = None,
        variant_count: Optional[int] = None,
        flow_submit_run_settings: Optional["FlowSubmitRunSettings"] = None,
        inputs: Optional[Dict[str, "FlowInputDefinition"]] = None,
        outputs: Optional[Dict[str, "FlowOutputDefinition"]] = None,
        batch_inputs: Optional[List[Dict[str, Any]]] = None,
        batch_data_input: Optional["BatchDataInput"] = None,
        **kwargs
    ):
        """
        :keyword bulk_test_id:
        :paramtype bulk_test_id: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword runtime:
        :paramtype runtime: str
        :keyword created_by:
        :paramtype created_by: ~flow.models.SchemaContractsCreatedBy
        :keyword created_on:
        :paramtype created_on: ~datetime.datetime
        :keyword evaluation_count:
        :paramtype evaluation_count: int
        :keyword variant_count:
        :paramtype variant_count: int
        :keyword flow_submit_run_settings:
        :paramtype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
        :keyword inputs: This is a dictionary.
        :paramtype inputs: dict[str, ~flow.models.FlowInputDefinition]
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, ~flow.models.FlowOutputDefinition]
        :keyword batch_inputs:
        :paramtype batch_inputs: list[dict[str, any]]
        :keyword batch_data_input:
        :paramtype batch_data_input: ~flow.models.BatchDataInput
        """
        super(BulkTestDto, self).__init__(**kwargs)
        self.bulk_test_id = bulk_test_id
        self.display_name = display_name
        self.description = description
        self.tags = tags
        self.runtime = runtime
        self.created_by = created_by
        self.created_on = created_on
        self.evaluation_count = evaluation_count
        self.variant_count = variant_count
        self.flow_submit_run_settings = flow_submit_run_settings
        self.inputs = inputs
        self.outputs = outputs
        self.batch_inputs = batch_inputs
        self.batch_data_input = batch_data_input


class CloudError(msrest.serialization.Model):
    """CloudError.

    Variables are only populated by the server, and will be ignored when sending a request.
    :ivar code:
    :vartype code: str
    :ivar message:
    :vartype message: str
    :ivar target:
    :vartype target: str
    :ivar details:
    :vartype details: list[~flow.models.CloudError]
    :ivar additional_info:
    :vartype additional_info: list[~flow.models.AdditionalErrorInfo]
    """

    # 'details' (recursive list of CloudError) and 'additional_info' are
    # read-only: populated by the service and never sent on requests, so
    # __init__ does not accept them and forces them to None below.
    _validation = {
        'details': {'readonly': True},
        'additional_info': {'readonly': True},
    }

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'details': {'key': 'details', 'type': '[CloudError]'},
        'additional_info': {'key': 'additionalInfo', 'type': '[AdditionalErrorInfo]'},
    }

    def __init__(
        self,
        *,
        code: Optional[str] = None,
        message: Optional[str] = None,
        target: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword code:
        :paramtype code: str
        :keyword message:
        :paramtype message: str
        :keyword target:
        :paramtype target: str
        """
        super(CloudError, self).__init__(**kwargs)
        self.code = code
        self.message = message
        self.target = target
        self.details = None
        self.additional_info = None


class CloudPrioritySetting(msrest.serialization.Model):
    """CloudPrioritySetting.

    :ivar scope_priority:
    :vartype scope_priority: ~flow.models.PriorityConfiguration
    :ivar aml_compute_priority:
    :vartype aml_compute_priority: ~flow.models.PriorityConfiguration
    :ivar itp_priority:
    :vartype itp_priority: ~flow.models.PriorityConfiguration
    :ivar singularity_priority:
    :vartype singularity_priority: ~flow.models.PriorityConfiguration
    """

    # NOTE: wire-key casing is inconsistent by design of the service contract:
    # 'scopePriority' is camelCase while the other three are PascalCase.
    _attribute_map = {
        'scope_priority': {'key': 'scopePriority', 'type': 'PriorityConfiguration'},
        'aml_compute_priority': {'key': 'AmlComputePriority', 'type': 'PriorityConfiguration'},
        'itp_priority': {'key': 'ItpPriority', 'type': 'PriorityConfiguration'},
        'singularity_priority': {'key': 'SingularityPriority', 'type': 'PriorityConfiguration'},
    }

    def __init__(
        self,
        *,
        scope_priority: Optional["PriorityConfiguration"] = None,
        aml_compute_priority: Optional["PriorityConfiguration"] = None,
        itp_priority: Optional["PriorityConfiguration"] = None,
        singularity_priority: Optional["PriorityConfiguration"] = None,
        **kwargs
    ):
        """
        :keyword scope_priority:
        :paramtype scope_priority: ~flow.models.PriorityConfiguration
        :keyword aml_compute_priority:
        :paramtype aml_compute_priority: ~flow.models.PriorityConfiguration
        :keyword itp_priority:
        :paramtype itp_priority: ~flow.models.PriorityConfiguration
        :keyword singularity_priority:
        :paramtype singularity_priority: ~flow.models.PriorityConfiguration
        """
        super(CloudPrioritySetting, self).__init__(**kwargs)
        self.scope_priority = scope_priority
        self.aml_compute_priority = aml_compute_priority
        self.itp_priority = itp_priority
        self.singularity_priority = singularity_priority


class CloudSettings(msrest.serialization.Model):
    """CloudSettings.
    :ivar linked_settings:
    :vartype linked_settings: list[~flow.models.ParameterAssignment]
    :ivar priority_config:
    :vartype priority_config: ~flow.models.PriorityConfiguration
    :ivar hdi_run_config:
    :vartype hdi_run_config: ~flow.models.HdiRunConfiguration
    :ivar sub_graph_config:
    :vartype sub_graph_config: ~flow.models.SubGraphConfiguration
    :ivar auto_ml_component_config:
    :vartype auto_ml_component_config: ~flow.models.AutoMLComponentConfiguration
    :ivar ap_cloud_config:
    :vartype ap_cloud_config: ~flow.models.APCloudConfiguration
    :ivar scope_cloud_config:
    :vartype scope_cloud_config: ~flow.models.ScopeCloudConfiguration
    :ivar es_cloud_config:
    :vartype es_cloud_config: ~flow.models.EsCloudConfiguration
    :ivar data_transfer_cloud_config:
    :vartype data_transfer_cloud_config: ~flow.models.DataTransferCloudConfiguration
    :ivar aml_spark_cloud_setting:
    :vartype aml_spark_cloud_setting: ~flow.models.AmlSparkCloudSetting
    :ivar data_transfer_v2_cloud_setting:
    :vartype data_transfer_v2_cloud_setting: ~flow.models.DataTransferV2CloudSetting
    """

    # Aggregates per-backend cloud configuration blocks; each field is an
    # optional sub-model for one execution target.
    _attribute_map = {
        'linked_settings': {'key': 'linkedSettings', 'type': '[ParameterAssignment]'},
        'priority_config': {'key': 'priorityConfig', 'type': 'PriorityConfiguration'},
        'hdi_run_config': {'key': 'hdiRunConfig', 'type': 'HdiRunConfiguration'},
        'sub_graph_config': {'key': 'subGraphConfig', 'type': 'SubGraphConfiguration'},
        'auto_ml_component_config': {'key': 'autoMLComponentConfig', 'type': 'AutoMLComponentConfiguration'},
        'ap_cloud_config': {'key': 'apCloudConfig', 'type': 'APCloudConfiguration'},
        'scope_cloud_config': {'key': 'scopeCloudConfig', 'type': 'ScopeCloudConfiguration'},
        'es_cloud_config': {'key': 'esCloudConfig', 'type': 'EsCloudConfiguration'},
        'data_transfer_cloud_config': {'key': 'dataTransferCloudConfig', 'type': 'DataTransferCloudConfiguration'},
        'aml_spark_cloud_setting': {'key': 'amlSparkCloudSetting', 'type': 'AmlSparkCloudSetting'},
        'data_transfer_v2_cloud_setting': {'key': 'dataTransferV2CloudSetting', 'type': 'DataTransferV2CloudSetting'},
    }

    def __init__(
        self,
        *,
        linked_settings: Optional[List["ParameterAssignment"]] = None,
        priority_config: Optional["PriorityConfiguration"] = None,
        hdi_run_config: Optional["HdiRunConfiguration"] = None,
        sub_graph_config: Optional["SubGraphConfiguration"] = None,
        auto_ml_component_config: Optional["AutoMLComponentConfiguration"] = None,
        ap_cloud_config: Optional["APCloudConfiguration"] = None,
        scope_cloud_config: Optional["ScopeCloudConfiguration"] = None,
        es_cloud_config: Optional["EsCloudConfiguration"] = None,
        data_transfer_cloud_config: Optional["DataTransferCloudConfiguration"] = None,
        aml_spark_cloud_setting: Optional["AmlSparkCloudSetting"] = None,
        data_transfer_v2_cloud_setting: Optional["DataTransferV2CloudSetting"] = None,
        **kwargs
    ):
        """
        :keyword linked_settings:
        :paramtype linked_settings: list[~flow.models.ParameterAssignment]
        :keyword priority_config:
        :paramtype priority_config: ~flow.models.PriorityConfiguration
        :keyword hdi_run_config:
        :paramtype hdi_run_config: ~flow.models.HdiRunConfiguration
        :keyword sub_graph_config:
        :paramtype sub_graph_config: ~flow.models.SubGraphConfiguration
        :keyword auto_ml_component_config:
        :paramtype auto_ml_component_config: ~flow.models.AutoMLComponentConfiguration
        :keyword ap_cloud_config:
        :paramtype ap_cloud_config: ~flow.models.APCloudConfiguration
        :keyword scope_cloud_config:
        :paramtype scope_cloud_config: ~flow.models.ScopeCloudConfiguration
        :keyword es_cloud_config:
        :paramtype es_cloud_config: ~flow.models.EsCloudConfiguration
        :keyword data_transfer_cloud_config:
        :paramtype data_transfer_cloud_config: ~flow.models.DataTransferCloudConfiguration
        :keyword aml_spark_cloud_setting:
        :paramtype aml_spark_cloud_setting: ~flow.models.AmlSparkCloudSetting
        :keyword data_transfer_v2_cloud_setting:
        :paramtype data_transfer_v2_cloud_setting: ~flow.models.DataTransferV2CloudSetting
        """
        super(CloudSettings, self).__init__(**kwargs)
        self.linked_settings = linked_settings
        self.priority_config = priority_config
        self.hdi_run_config = hdi_run_config
        self.sub_graph_config = sub_graph_config
        self.auto_ml_component_config = auto_ml_component_config
        self.ap_cloud_config = ap_cloud_config
        self.scope_cloud_config = scope_cloud_config
        self.es_cloud_config = es_cloud_config
        self.data_transfer_cloud_config = data_transfer_cloud_config
        self.aml_spark_cloud_setting = aml_spark_cloud_setting
        self.data_transfer_v2_cloud_setting = data_transfer_v2_cloud_setting


class ColumnTransformer(msrest.serialization.Model):
    """ColumnTransformer.

    :ivar fields:
    :vartype fields: list[str]
    :ivar parameters: Anything.
    :vartype parameters: any
    """

    # 'object' = msrest passthrough type: any JSON value is accepted.
    _attribute_map = {
        'fields': {'key': 'fields', 'type': '[str]'},
        'parameters': {'key': 'parameters', 'type': 'object'},
    }

    def __init__(
        self,
        *,
        fields: Optional[List[str]] = None,
        parameters: Optional[Any] = None,
        **kwargs
    ):
        """
        :keyword fields:
        :paramtype fields: list[str]
        :keyword parameters: Anything.
        :paramtype parameters: any
        """
        super(ColumnTransformer, self).__init__(**kwargs)
        self.fields = fields
        self.parameters = parameters


class CommandJob(msrest.serialization.Model):
    """CommandJob.

    :ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
     "AutoML", "Spark", "Base".
    :vartype job_type: str or ~flow.models.JobType
    :ivar code_id:
    :vartype code_id: str
    :ivar command:
    :vartype command: str
    :ivar environment_id:
    :vartype environment_id: str
    :ivar input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
    :vartype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
    :ivar output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
    :vartype output_data_bindings: dict[str, ~flow.models.OutputDataBinding]
    :ivar distribution:
    :vartype distribution: ~flow.models.DistributionConfiguration
    :ivar environment_variables: Dictionary of :code:`<string>`.
    :vartype environment_variables: dict[str, str]
    :ivar parameters: Dictionary of :code:`<any>`.
    :vartype parameters: dict[str, any]
    :ivar autologger_settings:
    :vartype autologger_settings: ~flow.models.MfeInternalAutologgerSettings
    :ivar limits:
    :vartype limits: ~flow.models.CommandJobLimits
    :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
     "InProgress".
    :vartype provisioning_state: str or ~flow.models.JobProvisioningState
    :ivar parent_job_name:
    :vartype parent_job_name: str
    :ivar display_name:
    :vartype display_name: str
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing",
     "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled",
     "NotResponding", "Paused", "Unknown", "Scheduled".
    :vartype status: str or ~flow.models.JobStatus
    :ivar interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
    :vartype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
    :ivar identity:
    :vartype identity: ~flow.models.MfeInternalIdentityConfiguration
    :ivar compute:
    :vartype compute: ~flow.models.ComputeConfiguration
    :ivar priority:
    :vartype priority: int
    :ivar output:
    :vartype output: ~flow.models.JobOutputArtifacts
    :ivar is_archived:
    :vartype is_archived: bool
    :ivar schedule:
    :vartype schedule: ~flow.models.ScheduleBase
    :ivar component_id:
    :vartype component_id: str
    :ivar notification_setting:
    :vartype notification_setting: ~flow.models.NotificationSetting
    :ivar secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
    :vartype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    """

    # msrest client-side validation: if 'command' is given, it must be a
    # non-empty string.
    _validation = {
        'command': {'min_length': 1},
    }

    _attribute_map = {
        'job_type': {'key': 'jobType', 'type': 'str'},
        'code_id': {'key': 'codeId', 'type': 'str'},
        'command': {'key': 'command', 'type': 'str'},
        'environment_id': {'key': 'environmentId', 'type': 'str'},
        'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'},
        'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'},
        'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'parameters': {'key': 'parameters', 'type': '{object}'},
        'autologger_settings': {'key': 'autologgerSettings', 'type': 'MfeInternalAutologgerSettings'},
        'limits': {'key': 'limits', 'type': 'CommandJobLimits'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'parent_job_name': {'key': 'parentJobName', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
        'identity': {'key': 'identity', 'type': 'MfeInternalIdentityConfiguration'},
        'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
        'priority': {'key': 'priority', 'type': 'int'},
        'output': {'key': 'output', 'type': 'JobOutputArtifacts'},
        'is_archived': {'key': 'isArchived', 'type': 'bool'},
        'schedule': {'key': 'schedule', 'type': 'ScheduleBase'},
        'component_id': {'key': 'componentId', 'type': 'str'},
        'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'},
        'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{MfeInternalSecretConfiguration}'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        job_type: Optional[Union[str, "JobType"]] = None,
        code_id: Optional[str] = None,
        command: Optional[str] = None,
        environment_id: Optional[str] = None,
        input_data_bindings: Optional[Dict[str, "InputDataBinding"]] = None,
        output_data_bindings: Optional[Dict[str, "OutputDataBinding"]] = None,
        distribution: Optional["DistributionConfiguration"] = None,
        environment_variables: Optional[Dict[str, str]] = None,
        parameters: Optional[Dict[str, Any]] = None,
        autologger_settings: Optional["MfeInternalAutologgerSettings"] = None,
        limits: Optional["CommandJobLimits"] = None,
        provisioning_state: Optional[Union[str, "JobProvisioningState"]] = None,
        parent_job_name: Optional[str] = None,
        display_name: Optional[str] = None,
        experiment_name: Optional[str] = None,
        status: Optional[Union[str, "JobStatus"]] = None,
        interaction_endpoints: Optional[Dict[str, "JobEndpoint"]] = None,
        identity: Optional["MfeInternalIdentityConfiguration"] = None,
        compute: Optional["ComputeConfiguration"] = None,
        priority: Optional[int] = None,
        output: Optional["JobOutputArtifacts"] = None,
        is_archived: Optional[bool] = None,
        schedule: Optional["ScheduleBase"] = None,
        component_id: Optional[str] = None,
        notification_setting: Optional["NotificationSetting"] = None,
        secrets_configuration: Optional[Dict[str, "MfeInternalSecretConfiguration"]] = None,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline",
         "Data", "AutoML", "Spark", "Base".
        :paramtype job_type: str or ~flow.models.JobType
        :keyword code_id:
        :paramtype code_id: str
        :keyword command:
        :paramtype command: str
        :keyword environment_id:
        :paramtype environment_id: str
        :keyword input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
        :paramtype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
        :keyword output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
:paramtype output_data_bindings: dict[str, ~flow.models.OutputDataBinding] :keyword distribution: :paramtype distribution: ~flow.models.DistributionConfiguration :keyword environment_variables: Dictionary of :code:`<string>`. :paramtype environment_variables: dict[str, str] :keyword parameters: Dictionary of :code:`<any>`. :paramtype parameters: dict[str, any] :keyword autologger_settings: :paramtype autologger_settings: ~flow.models.MfeInternalAutologgerSettings :keyword limits: :paramtype limits: ~flow.models.CommandJobLimits :keyword provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled", "InProgress". :paramtype provisioning_state: str or ~flow.models.JobProvisioningState :keyword parent_job_name: :paramtype parent_job_name: str :keyword display_name: :paramtype display_name: str :keyword experiment_name: :paramtype experiment_name: str :keyword status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". :paramtype status: str or ~flow.models.JobStatus :keyword interaction_endpoints: Dictionary of :code:`<JobEndpoint>`. :paramtype interaction_endpoints: dict[str, ~flow.models.JobEndpoint] :keyword identity: :paramtype identity: ~flow.models.MfeInternalIdentityConfiguration :keyword compute: :paramtype compute: ~flow.models.ComputeConfiguration :keyword priority: :paramtype priority: int :keyword output: :paramtype output: ~flow.models.JobOutputArtifacts :keyword is_archived: :paramtype is_archived: bool :keyword schedule: :paramtype schedule: ~flow.models.ScheduleBase :keyword component_id: :paramtype component_id: str :keyword notification_setting: :paramtype notification_setting: ~flow.models.NotificationSetting :keyword secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`. 
        :paramtype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        """
        super(CommandJob, self).__init__(**kwargs)
        # Plain data container: every keyword argument is stored on the
        # instance as-is; (de)serialization is driven by the class-level maps.
        self.job_type = job_type
        self.code_id = code_id
        self.command = command
        self.environment_id = environment_id
        self.input_data_bindings = input_data_bindings
        self.output_data_bindings = output_data_bindings
        self.distribution = distribution
        self.environment_variables = environment_variables
        self.parameters = parameters
        self.autologger_settings = autologger_settings
        self.limits = limits
        self.provisioning_state = provisioning_state
        self.parent_job_name = parent_job_name
        self.display_name = display_name
        self.experiment_name = experiment_name
        self.status = status
        self.interaction_endpoints = interaction_endpoints
        self.identity = identity
        self.compute = compute
        self.priority = priority
        self.output = output
        self.is_archived = is_archived
        self.schedule = schedule
        self.component_id = component_id
        self.notification_setting = notification_setting
        self.secrets_configuration = secrets_configuration
        self.description = description
        self.tags = tags
        self.properties = properties


class CommandJobLimits(msrest.serialization.Model):
    """CommandJobLimits.

    :ivar job_limits_type: Possible values include: "Command", "Sweep".
    :vartype job_limits_type: str or ~flow.models.JobLimitsType
    :ivar timeout:
    :vartype timeout: str
    """

    # Serialization map: Python attribute -> {JSON wire key, msrest type}.
    _attribute_map = {
        'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'},
        'timeout': {'key': 'timeout', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        job_limits_type: Optional[Union[str, "JobLimitsType"]] = None,
        timeout: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword job_limits_type: Possible values include: "Command", "Sweep".
        :paramtype job_limits_type: str or ~flow.models.JobLimitsType
        :keyword timeout:
        :paramtype timeout: str
        """
        super(CommandJobLimits, self).__init__(**kwargs)
        self.job_limits_type = job_limits_type
        self.timeout = timeout


class CommandReturnCodeConfig(msrest.serialization.Model):
    """CommandReturnCodeConfig.

    :ivar return_code: Possible values include: "Zero", "ZeroOrGreater".
    :vartype return_code: str or ~flow.models.SuccessfulCommandReturnCode
    :ivar successful_return_codes:
    :vartype successful_return_codes: list[int]
    """

    _attribute_map = {
        'return_code': {'key': 'returnCode', 'type': 'str'},
        'successful_return_codes': {'key': 'successfulReturnCodes', 'type': '[int]'},
    }

    def __init__(
        self,
        *,
        return_code: Optional[Union[str, "SuccessfulCommandReturnCode"]] = None,
        successful_return_codes: Optional[List[int]] = None,
        **kwargs
    ):
        """
        :keyword return_code: Possible values include: "Zero", "ZeroOrGreater".
        :paramtype return_code: str or ~flow.models.SuccessfulCommandReturnCode
        :keyword successful_return_codes:
        :paramtype successful_return_codes: list[int]
        """
        super(CommandReturnCodeConfig, self).__init__(**kwargs)
        self.return_code = return_code
        self.successful_return_codes = successful_return_codes


class ComponentConfiguration(msrest.serialization.Model):
    """ComponentConfiguration.

    :ivar component_identifier:
    :vartype component_identifier: str
    """

    _attribute_map = {
        'component_identifier': {'key': 'componentIdentifier', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        component_identifier: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword component_identifier:
        :paramtype component_identifier: str
        """
        super(ComponentConfiguration, self).__init__(**kwargs)
        self.component_identifier = component_identifier


class ComponentInput(msrest.serialization.Model):
    """ComponentInput.
    :ivar name:
    :vartype name: str
    :ivar optional:
    :vartype optional: bool
    :ivar description:
    :vartype description: str
    :ivar type:
    :vartype type: str
    :ivar default:
    :vartype default: str
    :ivar enum:
    :vartype enum: list[str]
    :ivar min:
    :vartype min: str
    :ivar max:
    :vartype max: str
    """

    # Serialization map: Python attribute -> {JSON wire key, msrest type}.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'optional': {'key': 'optional', 'type': 'bool'},
        'description': {'key': 'description', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'default': {'key': 'default', 'type': 'str'},
        'enum': {'key': 'enum', 'type': '[str]'},
        'min': {'key': 'min', 'type': 'str'},
        'max': {'key': 'max', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        optional: Optional[bool] = None,
        description: Optional[str] = None,
        type: Optional[str] = None,
        default: Optional[str] = None,
        enum: Optional[List[str]] = None,
        min: Optional[str] = None,
        max: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword optional:
        :paramtype optional: bool
        :keyword description:
        :paramtype description: str
        :keyword type:
        :paramtype type: str
        :keyword default:
        :paramtype default: str
        :keyword enum:
        :paramtype enum: list[str]
        :keyword min:
        :paramtype min: str
        :keyword max:
        :paramtype max: str
        """
        super(ComponentInput, self).__init__(**kwargs)
        # NOTE: parameters 'type', 'min' and 'max' shadow builtins; they are
        # kept as-is because they are part of the generated public signature.
        self.name = name
        self.optional = optional
        self.description = description
        self.type = type
        self.default = default
        self.enum = enum
        self.min = min
        self.max = max


class ComponentJob(msrest.serialization.Model):
    """ComponentJob.

    :ivar compute:
    :vartype compute: ~flow.models.ComputeConfiguration
    :ivar component_id:
    :vartype component_id: str
    :ivar inputs: This is a dictionary.
    :vartype inputs: dict[str, ~flow.models.ComponentJobInput]
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, ~flow.models.ComponentJobOutput]
    """

    _attribute_map = {
        'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
        'component_id': {'key': 'componentId', 'type': 'str'},
        'inputs': {'key': 'inputs', 'type': '{ComponentJobInput}'},
        'outputs': {'key': 'outputs', 'type': '{ComponentJobOutput}'},
    }

    def __init__(
        self,
        *,
        compute: Optional["ComputeConfiguration"] = None,
        component_id: Optional[str] = None,
        inputs: Optional[Dict[str, "ComponentJobInput"]] = None,
        outputs: Optional[Dict[str, "ComponentJobOutput"]] = None,
        **kwargs
    ):
        """
        :keyword compute:
        :paramtype compute: ~flow.models.ComputeConfiguration
        :keyword component_id:
        :paramtype component_id: str
        :keyword inputs: This is a dictionary.
        :paramtype inputs: dict[str, ~flow.models.ComponentJobInput]
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, ~flow.models.ComponentJobOutput]
        """
        super(ComponentJob, self).__init__(**kwargs)
        self.compute = compute
        self.component_id = component_id
        self.inputs = inputs
        self.outputs = outputs


class ComponentJobInput(msrest.serialization.Model):
    """ComponentJobInput.

    :ivar data:
    :vartype data: ~flow.models.InputData
    :ivar input_binding:
    :vartype input_binding: str
    """

    _attribute_map = {
        'data': {'key': 'data', 'type': 'InputData'},
        'input_binding': {'key': 'inputBinding', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        data: Optional["InputData"] = None,
        input_binding: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword data:
        :paramtype data: ~flow.models.InputData
        :keyword input_binding:
        :paramtype input_binding: str
        """
        super(ComponentJobInput, self).__init__(**kwargs)
        self.data = data
        self.input_binding = input_binding


class ComponentJobOutput(msrest.serialization.Model):
    """ComponentJobOutput.
    :ivar data:
    :vartype data: ~flow.models.MfeInternalOutputData
    :ivar output_binding:
    :vartype output_binding: str
    """

    # Serialization map: Python attribute -> {JSON wire key, msrest type}.
    _attribute_map = {
        'data': {'key': 'data', 'type': 'MfeInternalOutputData'},
        'output_binding': {'key': 'outputBinding', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        data: Optional["MfeInternalOutputData"] = None,
        output_binding: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword data:
        :paramtype data: ~flow.models.MfeInternalOutputData
        :keyword output_binding:
        :paramtype output_binding: str
        """
        super(ComponentJobOutput, self).__init__(**kwargs)
        self.data = data
        self.output_binding = output_binding


class ComponentNameAndDefaultVersion(msrest.serialization.Model):
    """ComponentNameAndDefaultVersion.

    :ivar component_name:
    :vartype component_name: str
    :ivar version:
    :vartype version: str
    :ivar feed_name:
    :vartype feed_name: str
    :ivar registry_name:
    :vartype registry_name: str
    """

    _attribute_map = {
        'component_name': {'key': 'componentName', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'feed_name': {'key': 'feedName', 'type': 'str'},
        'registry_name': {'key': 'registryName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        component_name: Optional[str] = None,
        version: Optional[str] = None,
        feed_name: Optional[str] = None,
        registry_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword component_name:
        :paramtype component_name: str
        :keyword version:
        :paramtype version: str
        :keyword feed_name:
        :paramtype feed_name: str
        :keyword registry_name:
        :paramtype registry_name: str
        """
        super(ComponentNameAndDefaultVersion, self).__init__(**kwargs)
        self.component_name = component_name
        self.version = version
        self.feed_name = feed_name
        self.registry_name = registry_name


class ComponentNameMetaInfo(msrest.serialization.Model):
    """ComponentNameMetaInfo.

    :ivar feed_name:
    :vartype feed_name: str
    :ivar component_name:
    :vartype component_name: str
    :ivar component_version:
    :vartype component_version: str
    :ivar registry_name:
    :vartype registry_name: str
    """

    _attribute_map = {
        'feed_name': {'key': 'feedName', 'type': 'str'},
        'component_name': {'key': 'componentName', 'type': 'str'},
        'component_version': {'key': 'componentVersion', 'type': 'str'},
        'registry_name': {'key': 'registryName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        feed_name: Optional[str] = None,
        component_name: Optional[str] = None,
        component_version: Optional[str] = None,
        registry_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword feed_name:
        :paramtype feed_name: str
        :keyword component_name:
        :paramtype component_name: str
        :keyword component_version:
        :paramtype component_version: str
        :keyword registry_name:
        :paramtype registry_name: str
        """
        super(ComponentNameMetaInfo, self).__init__(**kwargs)
        self.feed_name = feed_name
        self.component_name = component_name
        self.component_version = component_version
        self.registry_name = registry_name


class ComponentOutput(msrest.serialization.Model):
    """ComponentOutput.

    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar type:
    :vartype type: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        description: Optional[str] = None,
        type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword type:
        :paramtype type: str
        """
        super(ComponentOutput, self).__init__(**kwargs)
        self.name = name
        self.description = description
        self.type = type


class ComponentPreflightResult(msrest.serialization.Model):
    """ComponentPreflightResult.
    :ivar error_details:
    :vartype error_details: list[~flow.models.RootError]
    """

    # Serialization map: Python attribute -> {JSON wire key, msrest type}.
    _attribute_map = {
        'error_details': {'key': 'errorDetails', 'type': '[RootError]'},
    }

    def __init__(
        self,
        *,
        error_details: Optional[List["RootError"]] = None,
        **kwargs
    ):
        """
        :keyword error_details:
        :paramtype error_details: list[~flow.models.RootError]
        """
        super(ComponentPreflightResult, self).__init__(**kwargs)
        self.error_details = error_details


class ComponentSpecMetaInfo(msrest.serialization.Model):
    """ComponentSpecMetaInfo.

    :ivar component_spec: Anything.
    :vartype component_spec: any
    :ivar component_version:
    :vartype component_version: str
    :ivar is_anonymous:
    :vartype is_anonymous: bool
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar component_name:
    :vartype component_name: str
    :ivar description:
    :vartype description: str
    :ivar is_archived:
    :vartype is_archived: bool
    """

    _attribute_map = {
        'component_spec': {'key': 'componentSpec', 'type': 'object'},
        'component_version': {'key': 'componentVersion', 'type': 'str'},
        'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'component_name': {'key': 'componentName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'is_archived': {'key': 'isArchived', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        component_spec: Optional[Any] = None,
        component_version: Optional[str] = None,
        is_anonymous: Optional[bool] = None,
        properties: Optional[Dict[str, str]] = None,
        tags: Optional[Dict[str, str]] = None,
        component_name: Optional[str] = None,
        description: Optional[str] = None,
        is_archived: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword component_spec: Anything.
        :paramtype component_spec: any
        :keyword component_version:
        :paramtype component_version: str
        :keyword is_anonymous:
        :paramtype is_anonymous: bool
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword component_name:
        :paramtype component_name: str
        :keyword description:
        :paramtype description: str
        :keyword is_archived:
        :paramtype is_archived: bool
        """
        super(ComponentSpecMetaInfo, self).__init__(**kwargs)
        self.component_spec = component_spec
        self.component_version = component_version
        self.is_anonymous = is_anonymous
        self.properties = properties
        self.tags = tags
        self.component_name = component_name
        self.description = description
        self.is_archived = is_archived


class ComponentUpdateRequest(msrest.serialization.Model):
    """ComponentUpdateRequest.

    :ivar original_module_entity:
    :vartype original_module_entity: ~flow.models.ModuleEntity
    :ivar update_module_entity:
    :vartype update_module_entity: ~flow.models.ModuleEntity
    :ivar module_name:
    :vartype module_name: str
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar overwrite_with_original_name_and_version:
    :vartype overwrite_with_original_name_and_version: bool
    :ivar snapshot_id:
    :vartype snapshot_id: str
    """

    # Serialization map: Python attribute -> {JSON wire key, msrest type}.
    _attribute_map = {
        'original_module_entity': {'key': 'originalModuleEntity', 'type': 'ModuleEntity'},
        'update_module_entity': {'key': 'updateModuleEntity', 'type': 'ModuleEntity'},
        'module_name': {'key': 'moduleName', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'overwrite_with_original_name_and_version': {'key': 'overwriteWithOriginalNameAndVersion', 'type': 'bool'},
        'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        original_module_entity: Optional["ModuleEntity"] = None,
        update_module_entity: Optional["ModuleEntity"] = None,
        module_name: Optional[str] = None,
        properties: Optional[Dict[str, str]] = None,
        overwrite_with_original_name_and_version: Optional[bool] = None,
        snapshot_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword original_module_entity:
        :paramtype original_module_entity: ~flow.models.ModuleEntity
        :keyword update_module_entity:
        :paramtype update_module_entity: ~flow.models.ModuleEntity
        :keyword module_name:
        :paramtype module_name: str
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword overwrite_with_original_name_and_version:
        :paramtype overwrite_with_original_name_and_version: bool
        :keyword snapshot_id:
        :paramtype snapshot_id: str
        """
        super(ComponentUpdateRequest, self).__init__(**kwargs)
        self.original_module_entity = original_module_entity
        self.update_module_entity = update_module_entity
        self.module_name = module_name
        self.properties = properties
        self.overwrite_with_original_name_and_version = overwrite_with_original_name_and_version
        self.snapshot_id = snapshot_id


class ComponentValidationRequest(msrest.serialization.Model):
    """ComponentValidationRequest.

    :ivar component_identifier:
    :vartype component_identifier: str
    :ivar compute_identity:
    :vartype compute_identity: ~flow.models.ComputeIdentityDto
    :ivar execution_context_dto:
    :vartype execution_context_dto: ~flow.models.ExecutionContextDto
    :ivar environment_definition:
    :vartype environment_definition: ~flow.models.EnvironmentDefinitionDto
    :ivar data_port_dtos:
    :vartype data_port_dtos: list[~flow.models.DataPortDto]
    """

    _attribute_map = {
        'component_identifier': {'key': 'componentIdentifier', 'type': 'str'},
        'compute_identity': {'key': 'computeIdentity', 'type': 'ComputeIdentityDto'},
        'execution_context_dto': {'key': 'executionContextDto', 'type': 'ExecutionContextDto'},
        'environment_definition': {'key': 'environmentDefinition', 'type': 'EnvironmentDefinitionDto'},
        'data_port_dtos': {'key': 'dataPortDtos', 'type': '[DataPortDto]'},
    }

    def __init__(
        self,
        *,
        component_identifier: Optional[str] = None,
        compute_identity: Optional["ComputeIdentityDto"] = None,
        execution_context_dto: Optional["ExecutionContextDto"] = None,
        environment_definition: Optional["EnvironmentDefinitionDto"] = None,
        data_port_dtos: Optional[List["DataPortDto"]] = None,
        **kwargs
    ):
        """
        :keyword component_identifier:
        :paramtype component_identifier: str
        :keyword compute_identity:
        :paramtype compute_identity: ~flow.models.ComputeIdentityDto
        :keyword execution_context_dto:
        :paramtype execution_context_dto: ~flow.models.ExecutionContextDto
        :keyword environment_definition:
        :paramtype environment_definition: ~flow.models.EnvironmentDefinitionDto
        :keyword data_port_dtos:
        :paramtype data_port_dtos: list[~flow.models.DataPortDto]
        """
        super(ComponentValidationRequest, self).__init__(**kwargs)
        self.component_identifier = component_identifier
        self.compute_identity = compute_identity
        self.execution_context_dto = execution_context_dto
        self.environment_definition = environment_definition
        self.data_port_dtos = data_port_dtos


class ComponentValidationResponse(msrest.serialization.Model):
    """ComponentValidationResponse.

    :ivar status: Possible values include: "Succeeded", "Failed".
    :vartype status: str or ~flow.models.ValidationStatus
    :ivar error: The error response.
    :vartype error: ~flow.models.ErrorResponse
    """

    # Serialization map: Python attribute -> {JSON wire key, msrest type}.
    _attribute_map = {
        'status': {'key': 'status', 'type': 'str'},
        'error': {'key': 'error', 'type': 'ErrorResponse'},
    }

    def __init__(
        self,
        *,
        status: Optional[Union[str, "ValidationStatus"]] = None,
        error: Optional["ErrorResponse"] = None,
        **kwargs
    ):
        """
        :keyword status: Possible values include: "Succeeded", "Failed".
        :paramtype status: str or ~flow.models.ValidationStatus
        :keyword error: The error response.
        :paramtype error: ~flow.models.ErrorResponse
        """
        super(ComponentValidationResponse, self).__init__(**kwargs)
        self.status = status
        self.error = error


class Compute(msrest.serialization.Model):
    """Compute.

    :ivar target:
    :vartype target: str
    :ivar target_type:
    :vartype target_type: str
    :ivar vm_size:
    :vartype vm_size: str
    :ivar instance_type:
    :vartype instance_type: str
    :ivar instance_count:
    :vartype instance_count: int
    :ivar gpu_count:
    :vartype gpu_count: int
    :ivar priority:
    :vartype priority: str
    :ivar region:
    :vartype region: str
    :ivar arm_id:
    :vartype arm_id: str
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    """

    _attribute_map = {
        'target': {'key': 'target', 'type': 'str'},
        'target_type': {'key': 'targetType', 'type': 'str'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
        'gpu_count': {'key': 'gpuCount', 'type': 'int'},
        'priority': {'key': 'priority', 'type': 'str'},
        'region': {'key': 'region', 'type': 'str'},
        'arm_id': {'key': 'armId', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        target: Optional[str] = None,
        target_type: Optional[str] = None,
        vm_size: Optional[str] = None,
        instance_type: Optional[str] = None,
        instance_count: Optional[int] = None,
        gpu_count: Optional[int] = None,
        priority: Optional[str] = None,
        region: Optional[str] = None,
        arm_id: Optional[str] = None,
        properties: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword target:
        :paramtype target: str
        :keyword target_type:
        :paramtype target_type: str
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword instance_count:
        :paramtype instance_count: int
        :keyword gpu_count:
        :paramtype gpu_count: int
        :keyword priority:
        :paramtype priority: str
        :keyword region:
        :paramtype region: str
        :keyword arm_id:
        :paramtype arm_id: str
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        """
        super(Compute, self).__init__(**kwargs)
        self.target = target
        self.target_type = target_type
        self.vm_size = vm_size
        self.instance_type = instance_type
        self.instance_count = instance_count
        self.gpu_count = gpu_count
        self.priority = priority
        self.region = region
        self.arm_id = arm_id
        self.properties = properties


class ComputeConfiguration(msrest.serialization.Model):
    """ComputeConfiguration.
    :ivar target:
    :vartype target: str
    :ivar instance_count:
    :vartype instance_count: int
    :ivar max_instance_count:
    :vartype max_instance_count: int
    :ivar is_local:
    :vartype is_local: bool
    :ivar location:
    :vartype location: str
    :ivar is_clusterless:
    :vartype is_clusterless: bool
    :ivar instance_type:
    :vartype instance_type: str
    :ivar instance_priority:
    :vartype instance_priority: str
    :ivar job_priority:
    :vartype job_priority: int
    :ivar shm_size:
    :vartype shm_size: str
    :ivar docker_args:
    :vartype docker_args: str
    :ivar locations:
    :vartype locations: list[str]
    :ivar properties: Dictionary of :code:`<any>`.
    :vartype properties: dict[str, any]
    """

    # Serialization map: Python attribute -> {JSON wire key, msrest type}.
    _attribute_map = {
        'target': {'key': 'target', 'type': 'str'},
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
        'max_instance_count': {'key': 'maxInstanceCount', 'type': 'int'},
        'is_local': {'key': 'isLocal', 'type': 'bool'},
        'location': {'key': 'location', 'type': 'str'},
        'is_clusterless': {'key': 'isClusterless', 'type': 'bool'},
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'instance_priority': {'key': 'instancePriority', 'type': 'str'},
        'job_priority': {'key': 'jobPriority', 'type': 'int'},
        'shm_size': {'key': 'shmSize', 'type': 'str'},
        'docker_args': {'key': 'dockerArgs', 'type': 'str'},
        'locations': {'key': 'locations', 'type': '[str]'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(
        self,
        *,
        target: Optional[str] = None,
        instance_count: Optional[int] = None,
        max_instance_count: Optional[int] = None,
        is_local: Optional[bool] = None,
        location: Optional[str] = None,
        is_clusterless: Optional[bool] = None,
        instance_type: Optional[str] = None,
        instance_priority: Optional[str] = None,
        job_priority: Optional[int] = None,
        shm_size: Optional[str] = None,
        docker_args: Optional[str] = None,
        locations: Optional[List[str]] = None,
        properties: Optional[Dict[str, Any]] = None,
        **kwargs
    ):
        """
        :keyword target:
        :paramtype target: str
        :keyword instance_count:
        :paramtype instance_count: int
        :keyword max_instance_count:
        :paramtype max_instance_count: int
        :keyword is_local:
        :paramtype is_local: bool
        :keyword location:
        :paramtype location: str
        :keyword is_clusterless:
        :paramtype is_clusterless: bool
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword instance_priority:
        :paramtype instance_priority: str
        :keyword job_priority:
        :paramtype job_priority: int
        :keyword shm_size:
        :paramtype shm_size: str
        :keyword docker_args:
        :paramtype docker_args: str
        :keyword locations:
        :paramtype locations: list[str]
        :keyword properties: Dictionary of :code:`<any>`.
        :paramtype properties: dict[str, any]
        """
        super(ComputeConfiguration, self).__init__(**kwargs)
        self.target = target
        self.instance_count = instance_count
        self.max_instance_count = max_instance_count
        self.is_local = is_local
        self.location = location
        self.is_clusterless = is_clusterless
        self.instance_type = instance_type
        self.instance_priority = instance_priority
        self.job_priority = job_priority
        self.shm_size = shm_size
        self.docker_args = docker_args
        self.locations = locations
        self.properties = properties


class ComputeContract(msrest.serialization.Model):
    """ComputeContract.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id:
    :vartype id: str
    :ivar name:
    :vartype name: str
    :ivar type:
    :vartype type: str
    :ivar location:
    :vartype location: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar identity:
    :vartype identity: ~flow.models.ComputeIdentityContract
    :ivar properties:
    :vartype properties: ~flow.models.ComputeProperties
    """

    # 'type' is read-only: populated by the service, never sent by the client,
    # which is why __init__ below takes no 'type' argument.
    _validation = {
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'identity': {'key': 'identity', 'type': 'ComputeIdentityContract'},
        'properties': {'key': 'properties', 'type': 'ComputeProperties'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,
        name: Optional[str] = None,
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        identity: Optional["ComputeIdentityContract"] = None,
        properties: Optional["ComputeProperties"] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword name:
        :paramtype name: str
        :keyword location:
        :paramtype location: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword identity:
        :paramtype identity: ~flow.models.ComputeIdentityContract
        :keyword properties:
        :paramtype properties: ~flow.models.ComputeProperties
        """
        super(ComputeContract, self).__init__(**kwargs)
        self.id = id
        self.name = name
        # Read-only attribute: left as None locally; filled in on responses.
        self.type = None
        self.location = location
        self.tags = tags
        self.identity = identity
        self.properties = properties


class ComputeIdentityContract(msrest.serialization.Model):
    """ComputeIdentityContract.

    :ivar type:
    :vartype type: str
    :ivar system_identity_url:
    :vartype system_identity_url: str
    :ivar principal_id:
    :vartype principal_id: str
    :ivar tenant_id:
    :vartype tenant_id: str
    :ivar client_id:
    :vartype client_id: str
    :ivar client_secret_url:
    :vartype client_secret_url: str
    :ivar user_assigned_identities: This is a dictionary.
    :vartype user_assigned_identities: dict[str, ~flow.models.ComputeRPUserAssignedIdentity]
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'system_identity_url': {'key': 'systemIdentityUrl', 'type': 'str'},
        'principal_id': {'key': 'principalId', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'client_id': {'key': 'clientId', 'type': 'str'},
        'client_secret_url': {'key': 'clientSecretUrl', 'type': 'str'},
        'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{ComputeRPUserAssignedIdentity}'},
    }

    def __init__(
        self,
        *,
        type: Optional[str] = None,
        system_identity_url: Optional[str] = None,
        principal_id: Optional[str] = None,
        tenant_id: Optional[str] = None,
        client_id: Optional[str] = None,
        client_secret_url: Optional[str] = None,
        user_assigned_identities: Optional[Dict[str, "ComputeRPUserAssignedIdentity"]] = None,
        **kwargs
    ):
        """
        :keyword type:
        :paramtype type: str
        :keyword system_identity_url:
        :paramtype system_identity_url: str
        :keyword principal_id:
        :paramtype principal_id: str
        :keyword tenant_id:
        :paramtype tenant_id: str
        :keyword client_id:
        :paramtype client_id: str
        :keyword client_secret_url:
        :paramtype client_secret_url: str
        :keyword user_assigned_identities: This is a dictionary.
        :paramtype user_assigned_identities: dict[str, ~flow.models.ComputeRPUserAssignedIdentity]
        """
        super(ComputeIdentityContract, self).__init__(**kwargs)
        self.type = type
        self.system_identity_url = system_identity_url
        self.principal_id = principal_id
        self.tenant_id = tenant_id
        self.client_id = client_id
        self.client_secret_url = client_secret_url
        self.user_assigned_identities = user_assigned_identities


class ComputeIdentityDto(msrest.serialization.Model):
    """ComputeIdentityDto.
    :ivar compute_name:
    :vartype compute_name: str
    :ivar compute_target_type: Possible values include: "Local", "Remote", "HdiCluster",
     "ContainerInstance", "AmlCompute", "ComputeInstance", "Cmk8s", "SynapseSpark", "Kubernetes",
     "Aisc", "GlobalJobDispatcher", "Databricks", "MockedCompute".
    :vartype compute_target_type: str or ~flow.models.ComputeTargetType
    :ivar intellectual_property_publisher:
    :vartype intellectual_property_publisher: str
    """

    # Serialization map: Python attribute -> {JSON wire key, msrest type}.
    _attribute_map = {
        'compute_name': {'key': 'computeName', 'type': 'str'},
        'compute_target_type': {'key': 'computeTargetType', 'type': 'str'},
        'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        compute_name: Optional[str] = None,
        compute_target_type: Optional[Union[str, "ComputeTargetType"]] = None,
        intellectual_property_publisher: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword compute_name:
        :paramtype compute_name: str
        :keyword compute_target_type: Possible values include: "Local", "Remote", "HdiCluster",
         "ContainerInstance", "AmlCompute", "ComputeInstance", "Cmk8s", "SynapseSpark", "Kubernetes",
         "Aisc", "GlobalJobDispatcher", "Databricks", "MockedCompute".
        :paramtype compute_target_type: str or ~flow.models.ComputeTargetType
        :keyword intellectual_property_publisher:
        :paramtype intellectual_property_publisher: str
        """
        super(ComputeIdentityDto, self).__init__(**kwargs)
        self.compute_name = compute_name
        self.compute_target_type = compute_target_type
        self.intellectual_property_publisher = intellectual_property_publisher


class ComputeInfo(msrest.serialization.Model):
    """ComputeInfo.

    :ivar name:
    :vartype name: str
    :ivar compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT", "AKSENDPOINT",
     "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN".
    :vartype compute_type: str or ~flow.models.ComputeEnvironmentType
    :ivar is_ssl_enabled:
    :vartype is_ssl_enabled: bool
    :ivar is_gpu_type:
    :vartype is_gpu_type: bool
    :ivar cluster_purpose:
    :vartype cluster_purpose: str
    :ivar public_ip_address:
    :vartype public_ip_address: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'is_ssl_enabled': {'key': 'isSslEnabled', 'type': 'bool'},
        'is_gpu_type': {'key': 'isGpuType', 'type': 'bool'},
        'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'},
        'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        compute_type: Optional[Union[str, "ComputeEnvironmentType"]] = None,
        is_ssl_enabled: Optional[bool] = None,
        is_gpu_type: Optional[bool] = None,
        cluster_purpose: Optional[str] = None,
        public_ip_address: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT",
         "AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE",
         "UNKNOWN".
        :paramtype compute_type: str or ~flow.models.ComputeEnvironmentType
        :keyword is_ssl_enabled:
        :paramtype is_ssl_enabled: bool
        :keyword is_gpu_type:
        :paramtype is_gpu_type: bool
        :keyword cluster_purpose:
        :paramtype cluster_purpose: str
        :keyword public_ip_address:
        :paramtype public_ip_address: str
        """
        super(ComputeInfo, self).__init__(**kwargs)
        self.name = name
        self.compute_type = compute_type
        self.is_ssl_enabled = is_ssl_enabled
        self.is_gpu_type = is_gpu_type
        self.cluster_purpose = cluster_purpose
        self.public_ip_address = public_ip_address


class ComputeProperties(msrest.serialization.Model):
    """ComputeProperties.

    All required parameters must be populated in order to send to Azure.
:ivar created_on: :vartype created_on: ~datetime.datetime :ivar modified_on: :vartype modified_on: ~datetime.datetime :ivar disable_local_auth: :vartype disable_local_auth: bool :ivar description: :vartype description: str :ivar resource_id: :vartype resource_id: str :ivar compute_type: Required. :vartype compute_type: str :ivar compute_location: :vartype compute_location: str :ivar provisioning_state: Possible values include: "Unknown", "Updating", "Creating", "Deleting", "Accepted", "Succeeded", "Failed", "Canceled". :vartype provisioning_state: str or ~flow.models.ProvisioningState :ivar provisioning_errors: :vartype provisioning_errors: list[~flow.models.ODataErrorResponse] :ivar provisioning_warnings: This is a dictionary. :vartype provisioning_warnings: dict[str, str] :ivar is_attached_compute: :vartype is_attached_compute: bool :ivar properties: Any object. :vartype properties: any :ivar status: :vartype status: ~flow.models.ComputeStatus :ivar warnings: :vartype warnings: list[~flow.models.ComputeWarning] """ _validation = { 'compute_type': {'required': True, 'min_length': 1}, } _attribute_map = { 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, 'description': {'key': 'description', 'type': 'str'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'compute_location': {'key': 'computeLocation', 'type': 'str'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ODataErrorResponse]'}, 'provisioning_warnings': {'key': 'provisioningWarnings', 'type': '{str}'}, 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, 'properties': {'key': 'properties', 'type': 'object'}, 'status': {'key': 'status', 'type': 'ComputeStatus'}, 'warnings': {'key': 'warnings', 'type': '[ComputeWarning]'}, } 
def __init__( self, *, compute_type: str, created_on: Optional[datetime.datetime] = None, modified_on: Optional[datetime.datetime] = None, disable_local_auth: Optional[bool] = None, description: Optional[str] = None, resource_id: Optional[str] = None, compute_location: Optional[str] = None, provisioning_state: Optional[Union[str, "ProvisioningState"]] = None, provisioning_errors: Optional[List["ODataErrorResponse"]] = None, provisioning_warnings: Optional[Dict[str, str]] = None, is_attached_compute: Optional[bool] = None, properties: Optional[Any] = None, status: Optional["ComputeStatus"] = None, warnings: Optional[List["ComputeWarning"]] = None, **kwargs ): """ :keyword created_on: :paramtype created_on: ~datetime.datetime :keyword modified_on: :paramtype modified_on: ~datetime.datetime :keyword disable_local_auth: :paramtype disable_local_auth: bool :keyword description: :paramtype description: str :keyword resource_id: :paramtype resource_id: str :keyword compute_type: Required. :paramtype compute_type: str :keyword compute_location: :paramtype compute_location: str :keyword provisioning_state: Possible values include: "Unknown", "Updating", "Creating", "Deleting", "Accepted", "Succeeded", "Failed", "Canceled". :paramtype provisioning_state: str or ~flow.models.ProvisioningState :keyword provisioning_errors: :paramtype provisioning_errors: list[~flow.models.ODataErrorResponse] :keyword provisioning_warnings: This is a dictionary. :paramtype provisioning_warnings: dict[str, str] :keyword is_attached_compute: :paramtype is_attached_compute: bool :keyword properties: Any object. 
        :paramtype properties: any
        :keyword status:
        :paramtype status: ~flow.models.ComputeStatus
        :keyword warnings:
        :paramtype warnings: list[~flow.models.ComputeWarning]
        """
        super(ComputeProperties, self).__init__(**kwargs)
        self.created_on = created_on
        self.modified_on = modified_on
        self.disable_local_auth = disable_local_auth
        self.description = description
        self.resource_id = resource_id
        self.compute_type = compute_type
        self.compute_location = compute_location
        self.provisioning_state = provisioning_state
        self.provisioning_errors = provisioning_errors
        self.provisioning_warnings = provisioning_warnings
        self.is_attached_compute = is_attached_compute
        self.properties = properties
        self.status = status
        self.warnings = warnings


class ComputeRequest(msrest.serialization.Model):
    """ComputeRequest.

    Requested compute resources, expressed as node and GPU counts.

    :ivar node_count:
    :vartype node_count: int
    :ivar gpu_count:
    :vartype gpu_count: int
    """

    # Maps Python attribute names to the wire (JSON) keys and msrest types
    # consumed by msrest.serialization.Model for (de)serialization.
    _attribute_map = {
        'node_count': {'key': 'nodeCount', 'type': 'int'},
        'gpu_count': {'key': 'gpuCount', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        node_count: Optional[int] = None,
        gpu_count: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword node_count:
        :paramtype node_count: int
        :keyword gpu_count:
        :paramtype gpu_count: int
        """
        super(ComputeRequest, self).__init__(**kwargs)
        self.node_count = node_count
        self.gpu_count = gpu_count


class ComputeRPUserAssignedIdentity(msrest.serialization.Model):
    """ComputeRPUserAssignedIdentity.
:ivar principal_id: :vartype principal_id: str :ivar tenant_id: :vartype tenant_id: str :ivar client_id: :vartype client_id: str :ivar client_secret_url: :vartype client_secret_url: str :ivar resource_id: :vartype resource_id: str """ _attribute_map = { 'principal_id': {'key': 'principalId', 'type': 'str'}, 'tenant_id': {'key': 'tenantId', 'type': 'str'}, 'client_id': {'key': 'clientId', 'type': 'str'}, 'client_secret_url': {'key': 'clientSecretUrl', 'type': 'str'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, } def __init__( self, *, principal_id: Optional[str] = None, tenant_id: Optional[str] = None, client_id: Optional[str] = None, client_secret_url: Optional[str] = None, resource_id: Optional[str] = None, **kwargs ): """ :keyword principal_id: :paramtype principal_id: str :keyword tenant_id: :paramtype tenant_id: str :keyword client_id: :paramtype client_id: str :keyword client_secret_url: :paramtype client_secret_url: str :keyword resource_id: :paramtype resource_id: str """ super(ComputeRPUserAssignedIdentity, self).__init__(**kwargs) self.principal_id = principal_id self.tenant_id = tenant_id self.client_id = client_id self.client_secret_url = client_secret_url self.resource_id = resource_id class ComputeSetting(msrest.serialization.Model): """ComputeSetting. :ivar name: :vartype name: str :ivar compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker", "Databricks", "Aisc". 
:vartype compute_type: str or ~flow.models.ComputeType :ivar batch_ai_compute_info: :vartype batch_ai_compute_info: ~flow.models.BatchAiComputeInfo :ivar remote_docker_compute_info: :vartype remote_docker_compute_info: ~flow.models.RemoteDockerComputeInfo :ivar hdi_cluster_compute_info: :vartype hdi_cluster_compute_info: ~flow.models.HdiClusterComputeInfo :ivar mlc_compute_info: :vartype mlc_compute_info: ~flow.models.MlcComputeInfo :ivar databricks_compute_info: :vartype databricks_compute_info: ~flow.models.DatabricksComputeInfo """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'batch_ai_compute_info': {'key': 'batchAiComputeInfo', 'type': 'BatchAiComputeInfo'}, 'remote_docker_compute_info': {'key': 'remoteDockerComputeInfo', 'type': 'RemoteDockerComputeInfo'}, 'hdi_cluster_compute_info': {'key': 'hdiClusterComputeInfo', 'type': 'HdiClusterComputeInfo'}, 'mlc_compute_info': {'key': 'mlcComputeInfo', 'type': 'MlcComputeInfo'}, 'databricks_compute_info': {'key': 'databricksComputeInfo', 'type': 'DatabricksComputeInfo'}, } def __init__( self, *, name: Optional[str] = None, compute_type: Optional[Union[str, "ComputeType"]] = None, batch_ai_compute_info: Optional["BatchAiComputeInfo"] = None, remote_docker_compute_info: Optional["RemoteDockerComputeInfo"] = None, hdi_cluster_compute_info: Optional["HdiClusterComputeInfo"] = None, mlc_compute_info: Optional["MlcComputeInfo"] = None, databricks_compute_info: Optional["DatabricksComputeInfo"] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker", "Databricks", "Aisc". 
:paramtype compute_type: str or ~flow.models.ComputeType :keyword batch_ai_compute_info: :paramtype batch_ai_compute_info: ~flow.models.BatchAiComputeInfo :keyword remote_docker_compute_info: :paramtype remote_docker_compute_info: ~flow.models.RemoteDockerComputeInfo :keyword hdi_cluster_compute_info: :paramtype hdi_cluster_compute_info: ~flow.models.HdiClusterComputeInfo :keyword mlc_compute_info: :paramtype mlc_compute_info: ~flow.models.MlcComputeInfo :keyword databricks_compute_info: :paramtype databricks_compute_info: ~flow.models.DatabricksComputeInfo """ super(ComputeSetting, self).__init__(**kwargs) self.name = name self.compute_type = compute_type self.batch_ai_compute_info = batch_ai_compute_info self.remote_docker_compute_info = remote_docker_compute_info self.hdi_cluster_compute_info = hdi_cluster_compute_info self.mlc_compute_info = mlc_compute_info self.databricks_compute_info = databricks_compute_info class ComputeStatus(msrest.serialization.Model): """ComputeStatus. Variables are only populated by the server, and will be ignored when sending a request. :ivar is_status_available: :vartype is_status_available: bool :ivar detailed_status: Anything. :vartype detailed_status: any :ivar error: Represents OData v4 error object. :vartype error: ~flow.models.ODataError """ _validation = { 'is_status_available': {'readonly': True}, } _attribute_map = { 'is_status_available': {'key': 'isStatusAvailable', 'type': 'bool'}, 'detailed_status': {'key': 'detailedStatus', 'type': 'object'}, 'error': {'key': 'error', 'type': 'ODataError'}, } def __init__( self, *, detailed_status: Optional[Any] = None, error: Optional["ODataError"] = None, **kwargs ): """ :keyword detailed_status: Anything. :paramtype detailed_status: any :keyword error: Represents OData v4 error object. 
        :paramtype error: ~flow.models.ODataError
        """
        super(ComputeStatus, self).__init__(**kwargs)
        # Marked readonly in _validation: populated only by the server, so the
        # client always initializes it to None and never sends it.
        self.is_status_available = None
        self.detailed_status = detailed_status
        self.error = error


class ComputeStatusDetail(msrest.serialization.Model):
    """ComputeStatusDetail.

    Provisioning state and provisioning error message, both plain strings.

    :ivar provisioning_state:
    :vartype provisioning_state: str
    :ivar provisioning_error_message:
    :vartype provisioning_error_message: str
    """

    # Attribute name -> wire key/type map consumed by msrest serialization.
    _attribute_map = {
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'provisioning_error_message': {'key': 'provisioningErrorMessage', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        provisioning_state: Optional[str] = None,
        provisioning_error_message: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword provisioning_state:
        :paramtype provisioning_state: str
        :keyword provisioning_error_message:
        :paramtype provisioning_error_message: str
        """
        super(ComputeStatusDetail, self).__init__(**kwargs)
        self.provisioning_state = provisioning_state
        self.provisioning_error_message = provisioning_error_message


class ComputeWarning(msrest.serialization.Model):
    """ComputeWarning.

    :ivar title:
    :vartype title: str
    :ivar message:
    :vartype message: str
    :ivar code:
    :vartype code: str
    :ivar severity: Possible values include: "Critical", "Error", "Warning", "Info".
    :vartype severity: str or ~flow.models.SeverityLevel
    """

    # Severity is serialized as its plain string value ('str'), not as an enum.
    _attribute_map = {
        'title': {'key': 'title', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'code': {'key': 'code', 'type': 'str'},
        'severity': {'key': 'severity', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        title: Optional[str] = None,
        message: Optional[str] = None,
        code: Optional[str] = None,
        severity: Optional[Union[str, "SeverityLevel"]] = None,
        **kwargs
    ):
        """
        :keyword title:
        :paramtype title: str
        :keyword message:
        :paramtype message: str
        :keyword code:
        :paramtype code: str
        :keyword severity: Possible values include: "Critical", "Error", "Warning", "Info".
:paramtype severity: str or ~flow.models.SeverityLevel """ super(ComputeWarning, self).__init__(**kwargs) self.title = title self.message = message self.code = code self.severity = severity class ConnectionConfigSpec(msrest.serialization.Model): """ConnectionConfigSpec. :ivar name: :vartype name: str :ivar display_name: :vartype display_name: str :ivar config_value_type: Possible values include: "String", "Secret". :vartype config_value_type: str or ~flow.models.ConfigValueType :ivar description: :vartype description: str :ivar default_value: :vartype default_value: str :ivar enum_values: :vartype enum_values: list[str] :ivar is_optional: :vartype is_optional: bool """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'config_value_type': {'key': 'configValueType', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'default_value': {'key': 'defaultValue', 'type': 'str'}, 'enum_values': {'key': 'enumValues', 'type': '[str]'}, 'is_optional': {'key': 'isOptional', 'type': 'bool'}, } def __init__( self, *, name: Optional[str] = None, display_name: Optional[str] = None, config_value_type: Optional[Union[str, "ConfigValueType"]] = None, description: Optional[str] = None, default_value: Optional[str] = None, enum_values: Optional[List[str]] = None, is_optional: Optional[bool] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword display_name: :paramtype display_name: str :keyword config_value_type: Possible values include: "String", "Secret". 
:paramtype config_value_type: str or ~flow.models.ConfigValueType :keyword description: :paramtype description: str :keyword default_value: :paramtype default_value: str :keyword enum_values: :paramtype enum_values: list[str] :keyword is_optional: :paramtype is_optional: bool """ super(ConnectionConfigSpec, self).__init__(**kwargs) self.name = name self.display_name = display_name self.config_value_type = config_value_type self.description = description self.default_value = default_value self.enum_values = enum_values self.is_optional = is_optional class ConnectionDto(msrest.serialization.Model): """ConnectionDto. :ivar connection_name: :vartype connection_name: str :ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing", "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM", "Pinecone", "Qdrant", "Weaviate", "FormRecognizer". :vartype connection_type: str or ~flow.models.ConnectionType :ivar configs: This is a dictionary. :vartype configs: dict[str, str] :ivar custom_configs: This is a dictionary. 
:vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig] :ivar expiry_time: :vartype expiry_time: ~datetime.datetime :ivar owner: :vartype owner: ~flow.models.SchemaContractsCreatedBy :ivar created_date: :vartype created_date: ~datetime.datetime :ivar last_modified_date: :vartype last_modified_date: ~datetime.datetime """ _attribute_map = { 'connection_name': {'key': 'connectionName', 'type': 'str'}, 'connection_type': {'key': 'connectionType', 'type': 'str'}, 'configs': {'key': 'configs', 'type': '{str}'}, 'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'}, 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, 'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'}, 'created_date': {'key': 'createdDate', 'type': 'iso-8601'}, 'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'}, } def __init__( self, *, connection_name: Optional[str] = None, connection_type: Optional[Union[str, "ConnectionType"]] = None, configs: Optional[Dict[str, str]] = None, custom_configs: Optional[Dict[str, "CustomConnectionConfig"]] = None, expiry_time: Optional[datetime.datetime] = None, owner: Optional["SchemaContractsCreatedBy"] = None, created_date: Optional[datetime.datetime] = None, last_modified_date: Optional[datetime.datetime] = None, **kwargs ): """ :keyword connection_name: :paramtype connection_name: str :keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing", "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM", "Pinecone", "Qdrant", "Weaviate", "FormRecognizer". :paramtype connection_type: str or ~flow.models.ConnectionType :keyword configs: This is a dictionary. :paramtype configs: dict[str, str] :keyword custom_configs: This is a dictionary. 
:paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig] :keyword expiry_time: :paramtype expiry_time: ~datetime.datetime :keyword owner: :paramtype owner: ~flow.models.SchemaContractsCreatedBy :keyword created_date: :paramtype created_date: ~datetime.datetime :keyword last_modified_date: :paramtype last_modified_date: ~datetime.datetime """ super(ConnectionDto, self).__init__(**kwargs) self.connection_name = connection_name self.connection_type = connection_type self.configs = configs self.custom_configs = custom_configs self.expiry_time = expiry_time self.owner = owner self.created_date = created_date self.last_modified_date = last_modified_date class ConnectionEntity(msrest.serialization.Model): """ConnectionEntity. :ivar connection_id: :vartype connection_id: str :ivar connection_name: :vartype connection_name: str :ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing", "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM", "Pinecone", "Qdrant", "Weaviate", "FormRecognizer". :vartype connection_type: str or ~flow.models.ConnectionType :ivar connection_scope: Possible values include: "User", "WorkspaceShared". :vartype connection_scope: str or ~flow.models.ConnectionScope :ivar configs: This is a dictionary. :vartype configs: dict[str, str] :ivar custom_configs: This is a dictionary. 
:vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig] :ivar expiry_time: :vartype expiry_time: ~datetime.datetime :ivar secret_name: :vartype secret_name: str :ivar owner: :vartype owner: ~flow.models.SchemaContractsCreatedBy :ivar created_date: :vartype created_date: ~datetime.datetime :ivar last_modified_date: :vartype last_modified_date: ~datetime.datetime """ _attribute_map = { 'connection_id': {'key': 'connectionId', 'type': 'str'}, 'connection_name': {'key': 'connectionName', 'type': 'str'}, 'connection_type': {'key': 'connectionType', 'type': 'str'}, 'connection_scope': {'key': 'connectionScope', 'type': 'str'}, 'configs': {'key': 'configs', 'type': '{str}'}, 'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'}, 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, 'secret_name': {'key': 'secretName', 'type': 'str'}, 'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'}, 'created_date': {'key': 'createdDate', 'type': 'iso-8601'}, 'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'}, } def __init__( self, *, connection_id: Optional[str] = None, connection_name: Optional[str] = None, connection_type: Optional[Union[str, "ConnectionType"]] = None, connection_scope: Optional[Union[str, "ConnectionScope"]] = None, configs: Optional[Dict[str, str]] = None, custom_configs: Optional[Dict[str, "CustomConnectionConfig"]] = None, expiry_time: Optional[datetime.datetime] = None, secret_name: Optional[str] = None, owner: Optional["SchemaContractsCreatedBy"] = None, created_date: Optional[datetime.datetime] = None, last_modified_date: Optional[datetime.datetime] = None, **kwargs ): """ :keyword connection_id: :paramtype connection_id: str :keyword connection_name: :paramtype connection_name: str :keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing", "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM", "Pinecone", "Qdrant", 
"Weaviate", "FormRecognizer". :paramtype connection_type: str or ~flow.models.ConnectionType :keyword connection_scope: Possible values include: "User", "WorkspaceShared". :paramtype connection_scope: str or ~flow.models.ConnectionScope :keyword configs: This is a dictionary. :paramtype configs: dict[str, str] :keyword custom_configs: This is a dictionary. :paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig] :keyword expiry_time: :paramtype expiry_time: ~datetime.datetime :keyword secret_name: :paramtype secret_name: str :keyword owner: :paramtype owner: ~flow.models.SchemaContractsCreatedBy :keyword created_date: :paramtype created_date: ~datetime.datetime :keyword last_modified_date: :paramtype last_modified_date: ~datetime.datetime """ super(ConnectionEntity, self).__init__(**kwargs) self.connection_id = connection_id self.connection_name = connection_name self.connection_type = connection_type self.connection_scope = connection_scope self.configs = configs self.custom_configs = custom_configs self.expiry_time = expiry_time self.secret_name = secret_name self.owner = owner self.created_date = created_date self.last_modified_date = last_modified_date class ConnectionOverrideSetting(msrest.serialization.Model): """ConnectionOverrideSetting. :ivar connection_source_type: Possible values include: "Node", "NodeInput". 
:vartype connection_source_type: str or ~flow.models.ConnectionSourceType :ivar node_name: :vartype node_name: str :ivar node_input_name: :vartype node_input_name: str :ivar node_deployment_name_input: :vartype node_deployment_name_input: str :ivar node_model_input: :vartype node_model_input: str :ivar connection_name: :vartype connection_name: str :ivar deployment_name: :vartype deployment_name: str :ivar model: :vartype model: str :ivar connection_types: :vartype connection_types: list[str or ~flow.models.ConnectionType] :ivar capabilities: :vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities :ivar model_enum: :vartype model_enum: list[str] """ _attribute_map = { 'connection_source_type': {'key': 'connectionSourceType', 'type': 'str'}, 'node_name': {'key': 'nodeName', 'type': 'str'}, 'node_input_name': {'key': 'nodeInputName', 'type': 'str'}, 'node_deployment_name_input': {'key': 'nodeDeploymentNameInput', 'type': 'str'}, 'node_model_input': {'key': 'nodeModelInput', 'type': 'str'}, 'connection_name': {'key': 'connectionName', 'type': 'str'}, 'deployment_name': {'key': 'deploymentName', 'type': 'str'}, 'model': {'key': 'model', 'type': 'str'}, 'connection_types': {'key': 'connectionTypes', 'type': '[str]'}, 'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'}, 'model_enum': {'key': 'modelEnum', 'type': '[str]'}, } def __init__( self, *, connection_source_type: Optional[Union[str, "ConnectionSourceType"]] = None, node_name: Optional[str] = None, node_input_name: Optional[str] = None, node_deployment_name_input: Optional[str] = None, node_model_input: Optional[str] = None, connection_name: Optional[str] = None, deployment_name: Optional[str] = None, model: Optional[str] = None, connection_types: Optional[List[Union[str, "ConnectionType"]]] = None, capabilities: Optional["AzureOpenAIModelCapabilities"] = None, model_enum: Optional[List[str]] = None, **kwargs ): """ :keyword connection_source_type: Possible values include: 
"Node", "NodeInput". :paramtype connection_source_type: str or ~flow.models.ConnectionSourceType :keyword node_name: :paramtype node_name: str :keyword node_input_name: :paramtype node_input_name: str :keyword node_deployment_name_input: :paramtype node_deployment_name_input: str :keyword node_model_input: :paramtype node_model_input: str :keyword connection_name: :paramtype connection_name: str :keyword deployment_name: :paramtype deployment_name: str :keyword model: :paramtype model: str :keyword connection_types: :paramtype connection_types: list[str or ~flow.models.ConnectionType] :keyword capabilities: :paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities :keyword model_enum: :paramtype model_enum: list[str] """ super(ConnectionOverrideSetting, self).__init__(**kwargs) self.connection_source_type = connection_source_type self.node_name = node_name self.node_input_name = node_input_name self.node_deployment_name_input = node_deployment_name_input self.node_model_input = node_model_input self.connection_name = connection_name self.deployment_name = deployment_name self.model = model self.connection_types = connection_types self.capabilities = capabilities self.model_enum = model_enum class ConnectionSpec(msrest.serialization.Model): """ConnectionSpec. :ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing", "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM", "Pinecone", "Qdrant", "Weaviate", "FormRecognizer". 
:vartype connection_type: str or ~flow.models.ConnectionType :ivar config_specs: :vartype config_specs: list[~flow.models.ConnectionConfigSpec] """ _attribute_map = { 'connection_type': {'key': 'connectionType', 'type': 'str'}, 'config_specs': {'key': 'configSpecs', 'type': '[ConnectionConfigSpec]'}, } def __init__( self, *, connection_type: Optional[Union[str, "ConnectionType"]] = None, config_specs: Optional[List["ConnectionConfigSpec"]] = None, **kwargs ): """ :keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing", "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM", "Pinecone", "Qdrant", "Weaviate", "FormRecognizer". :paramtype connection_type: str or ~flow.models.ConnectionType :keyword config_specs: :paramtype config_specs: list[~flow.models.ConnectionConfigSpec] """ super(ConnectionSpec, self).__init__(**kwargs) self.connection_type = connection_type self.config_specs = config_specs class ContainerInstanceConfiguration(msrest.serialization.Model): """ContainerInstanceConfiguration. :ivar region: :vartype region: str :ivar cpu_cores: :vartype cpu_cores: float :ivar memory_gb: :vartype memory_gb: float """ _attribute_map = { 'region': {'key': 'region', 'type': 'str'}, 'cpu_cores': {'key': 'cpuCores', 'type': 'float'}, 'memory_gb': {'key': 'memoryGb', 'type': 'float'}, } def __init__( self, *, region: Optional[str] = None, cpu_cores: Optional[float] = None, memory_gb: Optional[float] = None, **kwargs ): """ :keyword region: :paramtype region: str :keyword cpu_cores: :paramtype cpu_cores: float :keyword memory_gb: :paramtype memory_gb: float """ super(ContainerInstanceConfiguration, self).__init__(**kwargs) self.region = region self.cpu_cores = cpu_cores self.memory_gb = memory_gb class ContainerRegistry(msrest.serialization.Model): """ContainerRegistry. 
:ivar address: :vartype address: str :ivar username: :vartype username: str :ivar password: :vartype password: str :ivar credential_type: :vartype credential_type: str :ivar registry_identity: :vartype registry_identity: ~flow.models.RegistryIdentity """ _attribute_map = { 'address': {'key': 'address', 'type': 'str'}, 'username': {'key': 'username', 'type': 'str'}, 'password': {'key': 'password', 'type': 'str'}, 'credential_type': {'key': 'credentialType', 'type': 'str'}, 'registry_identity': {'key': 'registryIdentity', 'type': 'RegistryIdentity'}, } def __init__( self, *, address: Optional[str] = None, username: Optional[str] = None, password: Optional[str] = None, credential_type: Optional[str] = None, registry_identity: Optional["RegistryIdentity"] = None, **kwargs ): """ :keyword address: :paramtype address: str :keyword username: :paramtype username: str :keyword password: :paramtype password: str :keyword credential_type: :paramtype credential_type: str :keyword registry_identity: :paramtype registry_identity: ~flow.models.RegistryIdentity """ super(ContainerRegistry, self).__init__(**kwargs) self.address = address self.username = username self.password = password self.credential_type = credential_type self.registry_identity = registry_identity class ContainerResourceRequirements(msrest.serialization.Model): """ContainerResourceRequirements. 
:ivar cpu: :vartype cpu: float :ivar cpu_limit: :vartype cpu_limit: float :ivar memory_in_gb: :vartype memory_in_gb: float :ivar memory_in_gb_limit: :vartype memory_in_gb_limit: float :ivar gpu_enabled: :vartype gpu_enabled: bool :ivar gpu: :vartype gpu: int :ivar fpga: :vartype fpga: int """ _attribute_map = { 'cpu': {'key': 'cpu', 'type': 'float'}, 'cpu_limit': {'key': 'cpuLimit', 'type': 'float'}, 'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'}, 'memory_in_gb_limit': {'key': 'memoryInGBLimit', 'type': 'float'}, 'gpu_enabled': {'key': 'gpuEnabled', 'type': 'bool'}, 'gpu': {'key': 'gpu', 'type': 'int'}, 'fpga': {'key': 'fpga', 'type': 'int'}, } def __init__( self, *, cpu: Optional[float] = None, cpu_limit: Optional[float] = None, memory_in_gb: Optional[float] = None, memory_in_gb_limit: Optional[float] = None, gpu_enabled: Optional[bool] = None, gpu: Optional[int] = None, fpga: Optional[int] = None, **kwargs ): """ :keyword cpu: :paramtype cpu: float :keyword cpu_limit: :paramtype cpu_limit: float :keyword memory_in_gb: :paramtype memory_in_gb: float :keyword memory_in_gb_limit: :paramtype memory_in_gb_limit: float :keyword gpu_enabled: :paramtype gpu_enabled: bool :keyword gpu: :paramtype gpu: int :keyword fpga: :paramtype fpga: int """ super(ContainerResourceRequirements, self).__init__(**kwargs) self.cpu = cpu self.cpu_limit = cpu_limit self.memory_in_gb = memory_in_gb self.memory_in_gb_limit = memory_in_gb_limit self.gpu_enabled = gpu_enabled self.gpu = gpu self.fpga = fpga class ControlInput(msrest.serialization.Model): """ControlInput. :ivar name: :vartype name: str :ivar default_value: Possible values include: "None", "False", "True", "Skipped". 
    :vartype default_value: str or ~flow.models.ControlInputValue
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'default_value': {'key': 'defaultValue', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        default_value: Optional[Union[str, "ControlInputValue"]] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword default_value: Possible values include: "None", "False", "True", "Skipped".
        :paramtype default_value: str or ~flow.models.ControlInputValue
        """
        super(ControlInput, self).__init__(**kwargs)
        self.name = name
        self.default_value = default_value


class ControlOutput(msrest.serialization.Model):
    """ControlOutput.

    A control output identified by name only.

    :ivar name:
    :vartype name: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        """
        super(ControlOutput, self).__init__(**kwargs)
        self.name = name


class CopyDataTask(msrest.serialization.Model):
    """CopyDataTask.

    :ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
    :vartype data_copy_mode: str or ~flow.models.DataCopyMode
    """

    # NOTE(review): the wire key is PascalCase ('DataCopyMode'), unlike the
    # camelCase keys used by sibling models. This mirrors the generated service
    # contract -- do not "normalize" it to camelCase.
    _attribute_map = {
        'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        data_copy_mode: Optional[Union[str, "DataCopyMode"]] = None,
        **kwargs
    ):
        """
        :keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
        :paramtype data_copy_mode: str or ~flow.models.DataCopyMode
        """
        super(CopyDataTask, self).__init__(**kwargs)
        self.data_copy_mode = data_copy_mode


class CreatedBy(msrest.serialization.Model):
    """CreatedBy.
:ivar user_object_id: :vartype user_object_id: str :ivar user_tenant_id: :vartype user_tenant_id: str :ivar user_name: :vartype user_name: str """ _attribute_map = { 'user_object_id': {'key': 'userObjectId', 'type': 'str'}, 'user_tenant_id': {'key': 'userTenantId', 'type': 'str'}, 'user_name': {'key': 'userName', 'type': 'str'}, } def __init__( self, *, user_object_id: Optional[str] = None, user_tenant_id: Optional[str] = None, user_name: Optional[str] = None, **kwargs ): """ :keyword user_object_id: :paramtype user_object_id: str :keyword user_tenant_id: :paramtype user_tenant_id: str :keyword user_name: :paramtype user_name: str """ super(CreatedBy, self).__init__(**kwargs) self.user_object_id = user_object_id self.user_tenant_id = user_tenant_id self.user_name = user_name class CreatedFromDto(msrest.serialization.Model): """CreatedFromDto. :ivar type: The only acceptable values to pass in are None and "Notebook". The default value is None. :vartype type: str :ivar location_type: The only acceptable values to pass in are None and "ArtifactId". The default value is None. :vartype location_type: str :ivar location: :vartype location: str """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'location_type': {'key': 'locationType', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, } def __init__( self, *, type: Optional[str] = None, location_type: Optional[str] = None, location: Optional[str] = None, **kwargs ): """ :keyword type: The only acceptable values to pass in are None and "Notebook". The default value is None. :paramtype type: str :keyword location_type: The only acceptable values to pass in are None and "ArtifactId". The default value is None. 
        :paramtype location_type: str
        :keyword location:
        :paramtype location: str
        """
        super(CreatedFromDto, self).__init__(**kwargs)
        # 'type' deliberately mirrors the wire field name and shadows the
        # builtin; generated code keeps this naming.
        self.type = type
        self.location_type = location_type
        self.location = location


class CreateFlowFromSampleRequest(msrest.serialization.Model):
    """CreateFlowFromSampleRequest.

    Request payload referencing a sample resource id plus flow metadata.

    :ivar flow_name:
    :vartype flow_name: str
    :ivar sample_resource_id:
    :vartype sample_resource_id: str
    :ivar flow_definition_file_path:
    :vartype flow_definition_file_path: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar is_archived:
    :vartype is_archived: bool
    """

    # Attribute name -> wire key/type map consumed by msrest serialization.
    _attribute_map = {
        'flow_name': {'key': 'flowName', 'type': 'str'},
        'sample_resource_id': {'key': 'sampleResourceId', 'type': 'str'},
        'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'is_archived': {'key': 'isArchived', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        flow_name: Optional[str] = None,
        sample_resource_id: Optional[str] = None,
        flow_definition_file_path: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        is_archived: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword flow_name:
        :paramtype flow_name: str
        :keyword sample_resource_id:
        :paramtype sample_resource_id: str
        :keyword flow_definition_file_path:
        :paramtype flow_definition_file_path: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword is_archived:
        :paramtype is_archived: bool
        """
        super(CreateFlowFromSampleRequest, self).__init__(**kwargs)
        self.flow_name = flow_name
        self.sample_resource_id = sample_resource_id
        self.flow_definition_file_path = flow_definition_file_path
        self.tags = tags
        self.is_archived = is_archived


class CreateFlowRequest(msrest.serialization.Model):
    """CreateFlowRequest.

    :ivar flow_name:
    :vartype flow_name: str
    :ivar description:
    :vartype description: str
    :ivar details:
    :vartype details: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str] :ivar flow: :vartype flow: ~flow.models.Flow :ivar flow_definition_file_path: :vartype flow_definition_file_path: str :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :vartype flow_type: str or ~flow.models.FlowType :ivar flow_run_settings: :vartype flow_run_settings: ~flow.models.FlowRunSettings :ivar is_archived: :vartype is_archived: bool :ivar vm_size: :vartype vm_size: str :ivar max_idle_time_seconds: :vartype max_idle_time_seconds: long :ivar identity: :vartype identity: str """ _attribute_map = { 'flow_name': {'key': 'flowName', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'details': {'key': 'details', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'flow': {'key': 'flow', 'type': 'Flow'}, 'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'}, 'flow_type': {'key': 'flowType', 'type': 'str'}, 'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'}, 'is_archived': {'key': 'isArchived', 'type': 'bool'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'}, 'identity': {'key': 'identity', 'type': 'str'}, } def __init__( self, *, flow_name: Optional[str] = None, description: Optional[str] = None, details: Optional[str] = None, tags: Optional[Dict[str, str]] = None, flow: Optional["Flow"] = None, flow_definition_file_path: Optional[str] = None, flow_type: Optional[Union[str, "FlowType"]] = None, flow_run_settings: Optional["FlowRunSettings"] = None, is_archived: Optional[bool] = None, vm_size: Optional[str] = None, max_idle_time_seconds: Optional[int] = None, identity: Optional[str] = None, **kwargs ): """ :keyword flow_name: :paramtype flow_name: str :keyword description: :paramtype description: str :keyword details: :paramtype details: str :keyword tags: A set of tags. Dictionary of :code:`<string>`. 
:paramtype tags: dict[str, str] :keyword flow: :paramtype flow: ~flow.models.Flow :keyword flow_definition_file_path: :paramtype flow_definition_file_path: str :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :paramtype flow_type: str or ~flow.models.FlowType :keyword flow_run_settings: :paramtype flow_run_settings: ~flow.models.FlowRunSettings :keyword is_archived: :paramtype is_archived: bool :keyword vm_size: :paramtype vm_size: str :keyword max_idle_time_seconds: :paramtype max_idle_time_seconds: long :keyword identity: :paramtype identity: str """ super(CreateFlowRequest, self).__init__(**kwargs) self.flow_name = flow_name self.description = description self.details = details self.tags = tags self.flow = flow self.flow_definition_file_path = flow_definition_file_path self.flow_type = flow_type self.flow_run_settings = flow_run_settings self.is_archived = is_archived self.vm_size = vm_size self.max_idle_time_seconds = max_idle_time_seconds self.identity = identity class CreateFlowRuntimeRequest(msrest.serialization.Model): """CreateFlowRuntimeRequest. :ivar runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance", "TrainingSession". 
:vartype runtime_type: str or ~flow.models.RuntimeType :ivar identity: :vartype identity: ~flow.models.ManagedServiceIdentity :ivar instance_type: :vartype instance_type: str :ivar from_existing_endpoint: :vartype from_existing_endpoint: bool :ivar from_existing_deployment: :vartype from_existing_deployment: bool :ivar endpoint_name: :vartype endpoint_name: str :ivar deployment_name: :vartype deployment_name: str :ivar compute_instance_name: :vartype compute_instance_name: str :ivar from_existing_custom_app: :vartype from_existing_custom_app: bool :ivar custom_app_name: :vartype custom_app_name: str :ivar runtime_description: :vartype runtime_description: str :ivar environment: :vartype environment: str :ivar instance_count: :vartype instance_count: int """ _attribute_map = { 'runtime_type': {'key': 'runtimeType', 'type': 'str'}, 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, 'instance_type': {'key': 'instanceType', 'type': 'str'}, 'from_existing_endpoint': {'key': 'fromExistingEndpoint', 'type': 'bool'}, 'from_existing_deployment': {'key': 'fromExistingDeployment', 'type': 'bool'}, 'endpoint_name': {'key': 'endpointName', 'type': 'str'}, 'deployment_name': {'key': 'deploymentName', 'type': 'str'}, 'compute_instance_name': {'key': 'computeInstanceName', 'type': 'str'}, 'from_existing_custom_app': {'key': 'fromExistingCustomApp', 'type': 'bool'}, 'custom_app_name': {'key': 'customAppName', 'type': 'str'}, 'runtime_description': {'key': 'runtimeDescription', 'type': 'str'}, 'environment': {'key': 'environment', 'type': 'str'}, 'instance_count': {'key': 'instanceCount', 'type': 'int'}, } def __init__( self, *, runtime_type: Optional[Union[str, "RuntimeType"]] = None, identity: Optional["ManagedServiceIdentity"] = None, instance_type: Optional[str] = None, from_existing_endpoint: Optional[bool] = None, from_existing_deployment: Optional[bool] = None, endpoint_name: Optional[str] = None, deployment_name: Optional[str] = None, compute_instance_name: 
Optional[str] = None, from_existing_custom_app: Optional[bool] = None, custom_app_name: Optional[str] = None, runtime_description: Optional[str] = None, environment: Optional[str] = None, instance_count: Optional[int] = None, **kwargs ): """ :keyword runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance", "TrainingSession". :paramtype runtime_type: str or ~flow.models.RuntimeType :keyword identity: :paramtype identity: ~flow.models.ManagedServiceIdentity :keyword instance_type: :paramtype instance_type: str :keyword from_existing_endpoint: :paramtype from_existing_endpoint: bool :keyword from_existing_deployment: :paramtype from_existing_deployment: bool :keyword endpoint_name: :paramtype endpoint_name: str :keyword deployment_name: :paramtype deployment_name: str :keyword compute_instance_name: :paramtype compute_instance_name: str :keyword from_existing_custom_app: :paramtype from_existing_custom_app: bool :keyword custom_app_name: :paramtype custom_app_name: str :keyword runtime_description: :paramtype runtime_description: str :keyword environment: :paramtype environment: str :keyword instance_count: :paramtype instance_count: int """ super(CreateFlowRuntimeRequest, self).__init__(**kwargs) self.runtime_type = runtime_type self.identity = identity self.instance_type = instance_type self.from_existing_endpoint = from_existing_endpoint self.from_existing_deployment = from_existing_deployment self.endpoint_name = endpoint_name self.deployment_name = deployment_name self.compute_instance_name = compute_instance_name self.from_existing_custom_app = from_existing_custom_app self.custom_app_name = custom_app_name self.runtime_description = runtime_description self.environment = environment self.instance_count = instance_count class CreateFlowSessionRequest(msrest.serialization.Model): """CreateFlowSessionRequest. 
:ivar python_pip_requirements: :vartype python_pip_requirements: list[str] :ivar base_image: :vartype base_image: str :ivar vm_size: :vartype vm_size: str :ivar max_idle_time_seconds: :vartype max_idle_time_seconds: long :ivar action: Possible values include: "Install", "Reset", "Update", "Delete". :vartype action: str or ~flow.models.SetupFlowSessionAction :ivar identity: :vartype identity: str """ _attribute_map = { 'python_pip_requirements': {'key': 'pythonPipRequirements', 'type': '[str]'}, 'base_image': {'key': 'baseImage', 'type': 'str'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'}, 'action': {'key': 'action', 'type': 'str'}, 'identity': {'key': 'identity', 'type': 'str'}, } def __init__( self, *, python_pip_requirements: Optional[List[str]] = None, base_image: Optional[str] = None, vm_size: Optional[str] = None, max_idle_time_seconds: Optional[int] = None, action: Optional[Union[str, "SetupFlowSessionAction"]] = None, identity: Optional[str] = None, **kwargs ): """ :keyword python_pip_requirements: :paramtype python_pip_requirements: list[str] :keyword base_image: :paramtype base_image: str :keyword vm_size: :paramtype vm_size: str :keyword max_idle_time_seconds: :paramtype max_idle_time_seconds: long :keyword action: Possible values include: "Install", "Reset", "Update", "Delete". :paramtype action: str or ~flow.models.SetupFlowSessionAction :keyword identity: :paramtype identity: str """ super(CreateFlowSessionRequest, self).__init__(**kwargs) self.python_pip_requirements = python_pip_requirements self.base_image = base_image self.vm_size = vm_size self.max_idle_time_seconds = max_idle_time_seconds self.action = action self.identity = identity class CreateInferencePipelineRequest(msrest.serialization.Model): """CreateInferencePipelineRequest. 
:ivar module_node_id: :vartype module_node_id: str :ivar port_name: :vartype port_name: str :ivar training_pipeline_draft_name: :vartype training_pipeline_draft_name: str :ivar training_pipeline_run_display_name: :vartype training_pipeline_run_display_name: str :ivar name: :vartype name: str :ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown". :vartype pipeline_type: str or ~flow.models.PipelineType :ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom". :vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode :ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin", "ContainsDesignerBuildin". :vartype graph_components_mode: str or ~flow.models.GraphComponentsMode :ivar sub_pipelines_info: :vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo :ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`. :vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft] :ivar pipeline_parameters: This is a dictionary. :vartype pipeline_parameters: dict[str, str] :ivar data_path_assignments: This is a dictionary. :vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath] :ivar data_set_definition_value_assignments: This is a dictionary. :vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue] :ivar asset_output_settings_assignments: This is a dictionary. 
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings] :ivar graph: :vartype graph: ~flow.models.GraphDraftEntity :ivar pipeline_run_settings: :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment] :ivar module_node_run_settings: :vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting] :ivar module_node_ui_input_settings: :vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting] :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar continue_run_on_step_failure: :vartype continue_run_on_step_failure: bool :ivar description: :vartype description: str :ivar properties: This is a dictionary. :vartype properties: dict[str, str] :ivar enforce_rerun: :vartype enforce_rerun: bool :ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset", "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset". 
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes """ _attribute_map = { 'module_node_id': {'key': 'moduleNodeId', 'type': 'str'}, 'port_name': {'key': 'portName', 'type': 'str'}, 'training_pipeline_draft_name': {'key': 'trainingPipelineDraftName', 'type': 'str'}, 'training_pipeline_run_display_name': {'key': 'trainingPipelineRunDisplayName', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'pipeline_type': {'key': 'pipelineType', 'type': 'str'}, 'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'}, 'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'}, 'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'}, 'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'}, 'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'}, 'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'}, 'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'}, 'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'}, 'graph': {'key': 'graph', 'type': 'GraphDraftEntity'}, 'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'}, 'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'}, 'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'}, 'description': {'key': 'description', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'}, 'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'}, } def __init__( self, *, module_node_id: Optional[str] = None, port_name: Optional[str] = 
None, training_pipeline_draft_name: Optional[str] = None, training_pipeline_run_display_name: Optional[str] = None, name: Optional[str] = None, pipeline_type: Optional[Union[str, "PipelineType"]] = None, pipeline_draft_mode: Optional[Union[str, "PipelineDraftMode"]] = None, graph_components_mode: Optional[Union[str, "GraphComponentsMode"]] = None, sub_pipelines_info: Optional["SubPipelinesInfo"] = None, flattened_sub_graphs: Optional[Dict[str, "PipelineSubDraft"]] = None, pipeline_parameters: Optional[Dict[str, str]] = None, data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None, data_set_definition_value_assignments: Optional[Dict[str, "DataSetDefinitionValue"]] = None, asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None, graph: Optional["GraphDraftEntity"] = None, pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None, module_node_run_settings: Optional[List["GraphModuleNodeRunSetting"]] = None, module_node_ui_input_settings: Optional[List["GraphModuleNodeUIInputSetting"]] = None, tags: Optional[Dict[str, str]] = None, continue_run_on_step_failure: Optional[bool] = None, description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, enforce_rerun: Optional[bool] = None, dataset_access_modes: Optional[Union[str, "DatasetAccessModes"]] = None, **kwargs ): """ :keyword module_node_id: :paramtype module_node_id: str :keyword port_name: :paramtype port_name: str :keyword training_pipeline_draft_name: :paramtype training_pipeline_draft_name: str :keyword training_pipeline_run_display_name: :paramtype training_pipeline_run_display_name: str :keyword name: :paramtype name: str :keyword pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown". :paramtype pipeline_type: str or ~flow.models.PipelineType :keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom". 
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode :keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin", "ContainsDesignerBuildin". :paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode :keyword sub_pipelines_info: :paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo :keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`. :paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft] :keyword pipeline_parameters: This is a dictionary. :paramtype pipeline_parameters: dict[str, str] :keyword data_path_assignments: This is a dictionary. :paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath] :keyword data_set_definition_value_assignments: This is a dictionary. :paramtype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue] :keyword asset_output_settings_assignments: This is a dictionary. :paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings] :keyword graph: :paramtype graph: ~flow.models.GraphDraftEntity :keyword pipeline_run_settings: :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment] :keyword module_node_run_settings: :paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting] :keyword module_node_ui_input_settings: :paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting] :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword continue_run_on_step_failure: :paramtype continue_run_on_step_failure: bool :keyword description: :paramtype description: str :keyword properties: This is a dictionary. 
:paramtype properties: dict[str, str] :keyword enforce_rerun: :paramtype enforce_rerun: bool :keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset", "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset". :paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes """ super(CreateInferencePipelineRequest, self).__init__(**kwargs) self.module_node_id = module_node_id self.port_name = port_name self.training_pipeline_draft_name = training_pipeline_draft_name self.training_pipeline_run_display_name = training_pipeline_run_display_name self.name = name self.pipeline_type = pipeline_type self.pipeline_draft_mode = pipeline_draft_mode self.graph_components_mode = graph_components_mode self.sub_pipelines_info = sub_pipelines_info self.flattened_sub_graphs = flattened_sub_graphs self.pipeline_parameters = pipeline_parameters self.data_path_assignments = data_path_assignments self.data_set_definition_value_assignments = data_set_definition_value_assignments self.asset_output_settings_assignments = asset_output_settings_assignments self.graph = graph self.pipeline_run_settings = pipeline_run_settings self.module_node_run_settings = module_node_run_settings self.module_node_ui_input_settings = module_node_ui_input_settings self.tags = tags self.continue_run_on_step_failure = continue_run_on_step_failure self.description = description self.properties = properties self.enforce_rerun = enforce_rerun self.dataset_access_modes = dataset_access_modes class CreateOrUpdateConnectionRequest(msrest.serialization.Model): """CreateOrUpdateConnectionRequest. 
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing", "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM", "Pinecone", "Qdrant", "Weaviate", "FormRecognizer". :vartype connection_type: str or ~flow.models.ConnectionType :ivar connection_scope: Possible values include: "User", "WorkspaceShared". :vartype connection_scope: str or ~flow.models.ConnectionScope :ivar configs: This is a dictionary. :vartype configs: dict[str, str] :ivar custom_configs: This is a dictionary. :vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig] :ivar expiry_time: :vartype expiry_time: ~datetime.datetime """ _attribute_map = { 'connection_type': {'key': 'connectionType', 'type': 'str'}, 'connection_scope': {'key': 'connectionScope', 'type': 'str'}, 'configs': {'key': 'configs', 'type': '{str}'}, 'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'}, 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, } def __init__( self, *, connection_type: Optional[Union[str, "ConnectionType"]] = None, connection_scope: Optional[Union[str, "ConnectionScope"]] = None, configs: Optional[Dict[str, str]] = None, custom_configs: Optional[Dict[str, "CustomConnectionConfig"]] = None, expiry_time: Optional[datetime.datetime] = None, **kwargs ): """ :keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing", "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM", "Pinecone", "Qdrant", "Weaviate", "FormRecognizer". :paramtype connection_type: str or ~flow.models.ConnectionType :keyword connection_scope: Possible values include: "User", "WorkspaceShared". :paramtype connection_scope: str or ~flow.models.ConnectionScope :keyword configs: This is a dictionary. :paramtype configs: dict[str, str] :keyword custom_configs: This is a dictionary. 
:paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig] :keyword expiry_time: :paramtype expiry_time: ~datetime.datetime """ super(CreateOrUpdateConnectionRequest, self).__init__(**kwargs) self.connection_type = connection_type self.connection_scope = connection_scope self.configs = configs self.custom_configs = custom_configs self.expiry_time = expiry_time class CreateOrUpdateConnectionRequestDto(msrest.serialization.Model): """CreateOrUpdateConnectionRequestDto. :ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing", "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM", "Pinecone", "Qdrant", "Weaviate", "FormRecognizer". :vartype connection_type: str or ~flow.models.ConnectionType :ivar configs: This is a dictionary. :vartype configs: dict[str, str] :ivar custom_configs: This is a dictionary. :vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig] :ivar expiry_time: :vartype expiry_time: ~datetime.datetime """ _attribute_map = { 'connection_type': {'key': 'connectionType', 'type': 'str'}, 'configs': {'key': 'configs', 'type': '{str}'}, 'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'}, 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, } def __init__( self, *, connection_type: Optional[Union[str, "ConnectionType"]] = None, configs: Optional[Dict[str, str]] = None, custom_configs: Optional[Dict[str, "CustomConnectionConfig"]] = None, expiry_time: Optional[datetime.datetime] = None, **kwargs ): """ :keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing", "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM", "Pinecone", "Qdrant", "Weaviate", "FormRecognizer". :paramtype connection_type: str or ~flow.models.ConnectionType :keyword configs: This is a dictionary. :paramtype configs: dict[str, str] :keyword custom_configs: This is a dictionary. 
:paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig] :keyword expiry_time: :paramtype expiry_time: ~datetime.datetime """ super(CreateOrUpdateConnectionRequestDto, self).__init__(**kwargs) self.connection_type = connection_type self.configs = configs self.custom_configs = custom_configs self.expiry_time = expiry_time class CreatePipelineDraftRequest(msrest.serialization.Model): """CreatePipelineDraftRequest. :ivar name: :vartype name: str :ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown". :vartype pipeline_type: str or ~flow.models.PipelineType :ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom". :vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode :ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin", "ContainsDesignerBuildin". :vartype graph_components_mode: str or ~flow.models.GraphComponentsMode :ivar sub_pipelines_info: :vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo :ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`. :vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft] :ivar pipeline_parameters: This is a dictionary. :vartype pipeline_parameters: dict[str, str] :ivar data_path_assignments: This is a dictionary. :vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath] :ivar data_set_definition_value_assignments: This is a dictionary. :vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue] :ivar asset_output_settings_assignments: This is a dictionary. 
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings] :ivar graph: :vartype graph: ~flow.models.GraphDraftEntity :ivar pipeline_run_settings: :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment] :ivar module_node_run_settings: :vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting] :ivar module_node_ui_input_settings: :vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting] :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar continue_run_on_step_failure: :vartype continue_run_on_step_failure: bool :ivar description: :vartype description: str :ivar properties: This is a dictionary. :vartype properties: dict[str, str] :ivar enforce_rerun: :vartype enforce_rerun: bool :ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset", "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset". 
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'pipeline_type': {'key': 'pipelineType', 'type': 'str'}, 'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'}, 'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'}, 'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'}, 'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'}, 'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'}, 'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'}, 'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'}, 'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'}, 'graph': {'key': 'graph', 'type': 'GraphDraftEntity'}, 'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'}, 'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'}, 'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'}, 'description': {'key': 'description', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'}, 'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, pipeline_type: Optional[Union[str, "PipelineType"]] = None, pipeline_draft_mode: Optional[Union[str, "PipelineDraftMode"]] = None, graph_components_mode: Optional[Union[str, "GraphComponentsMode"]] = None, sub_pipelines_info: Optional["SubPipelinesInfo"] = None, flattened_sub_graphs: Optional[Dict[str, "PipelineSubDraft"]] = 
None, pipeline_parameters: Optional[Dict[str, str]] = None, data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None, data_set_definition_value_assignments: Optional[Dict[str, "DataSetDefinitionValue"]] = None, asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None, graph: Optional["GraphDraftEntity"] = None, pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None, module_node_run_settings: Optional[List["GraphModuleNodeRunSetting"]] = None, module_node_ui_input_settings: Optional[List["GraphModuleNodeUIInputSetting"]] = None, tags: Optional[Dict[str, str]] = None, continue_run_on_step_failure: Optional[bool] = None, description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, enforce_rerun: Optional[bool] = None, dataset_access_modes: Optional[Union[str, "DatasetAccessModes"]] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown". :paramtype pipeline_type: str or ~flow.models.PipelineType :keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom". :paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode :keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin", "ContainsDesignerBuildin". :paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode :keyword sub_pipelines_info: :paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo :keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`. :paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft] :keyword pipeline_parameters: This is a dictionary. :paramtype pipeline_parameters: dict[str, str] :keyword data_path_assignments: This is a dictionary. 
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath] :keyword data_set_definition_value_assignments: This is a dictionary. :paramtype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue] :keyword asset_output_settings_assignments: This is a dictionary. :paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings] :keyword graph: :paramtype graph: ~flow.models.GraphDraftEntity :keyword pipeline_run_settings: :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment] :keyword module_node_run_settings: :paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting] :keyword module_node_ui_input_settings: :paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting] :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword continue_run_on_step_failure: :paramtype continue_run_on_step_failure: bool :keyword description: :paramtype description: str :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] :keyword enforce_rerun: :paramtype enforce_rerun: bool :keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset", "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset". 
        :paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
        """
        # Tail of CreatePipelineDraftRequest.__init__ (signature begins above this
        # chunk): delegate to the msrest base, then store each optional keyword
        # as an instance attribute, unmodified.  Unset fields stay None.
        super(CreatePipelineDraftRequest, self).__init__(**kwargs)
        self.name = name
        self.pipeline_type = pipeline_type
        self.pipeline_draft_mode = pipeline_draft_mode
        self.graph_components_mode = graph_components_mode
        self.sub_pipelines_info = sub_pipelines_info
        self.flattened_sub_graphs = flattened_sub_graphs
        self.pipeline_parameters = pipeline_parameters
        self.data_path_assignments = data_path_assignments
        self.data_set_definition_value_assignments = data_set_definition_value_assignments
        self.asset_output_settings_assignments = asset_output_settings_assignments
        self.graph = graph
        self.pipeline_run_settings = pipeline_run_settings
        self.module_node_run_settings = module_node_run_settings
        self.module_node_ui_input_settings = module_node_ui_input_settings
        self.tags = tags
        self.continue_run_on_step_failure = continue_run_on_step_failure
        self.description = description
        self.properties = properties
        self.enforce_rerun = enforce_rerun
        self.dataset_access_modes = dataset_access_modes


# Request body for creating a pipeline-job schedule.  Like the other models in
# this module it is a passive data container: `_attribute_map` tells the
# msrest base class (msrest.serialization.Model) how to map each Python
# attribute to its REST wire key and serialization type, and `__init__`
# simply stores the optional keyword arguments.
# NOTE(review): this file appears autorest-generated — hand edits other than
# comments would likely be lost on regeneration; confirm before changing code.
class CreatePipelineJobScheduleDto(msrest.serialization.Model):
    """CreatePipelineJobScheduleDto.

    :ivar name:
    :vartype name: str
    :ivar pipeline_job_name:
    :vartype pipeline_job_name: str
    :ivar pipeline_job_runtime_settings:
    :vartype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
    :ivar display_name:
    :vartype display_name: str
    :ivar trigger_type: Possible values include: "Recurrence", "Cron".
    :vartype trigger_type: str or ~flow.models.TriggerType
    :ivar recurrence:
    :vartype recurrence: ~flow.models.Recurrence
    :ivar cron:
    :vartype cron: ~flow.models.Cron
    :ivar status: Possible values include: "Enabled", "Disabled".
    :vartype status: str or ~flow.models.ScheduleStatus
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    """

    # Python attribute -> {wire key, msrest type}; '{str}' means dict-of-str.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
        'pipeline_job_runtime_settings': {'key': 'pipelineJobRuntimeSettings', 'type': 'PipelineJobRuntimeBasicSettings'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'trigger_type': {'key': 'triggerType', 'type': 'str'},
        'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
        'cron': {'key': 'cron', 'type': 'Cron'},
        'status': {'key': 'status', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        pipeline_job_name: Optional[str] = None,
        pipeline_job_runtime_settings: Optional["PipelineJobRuntimeBasicSettings"] = None,
        display_name: Optional[str] = None,
        trigger_type: Optional[Union[str, "TriggerType"]] = None,
        recurrence: Optional["Recurrence"] = None,
        cron: Optional["Cron"] = None,
        status: Optional[Union[str, "ScheduleStatus"]] = None,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword pipeline_job_name:
        :paramtype pipeline_job_name: str
        :keyword pipeline_job_runtime_settings:
        :paramtype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
        :keyword display_name:
        :paramtype display_name: str
        :keyword trigger_type: Possible values include: "Recurrence", "Cron".
        :paramtype trigger_type: str or ~flow.models.TriggerType
        :keyword recurrence:
        :paramtype recurrence: ~flow.models.Recurrence
        :keyword cron:
        :paramtype cron: ~flow.models.Cron
        :keyword status: Possible values include: "Enabled", "Disabled".
        :paramtype status: str or ~flow.models.ScheduleStatus
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        """
        # All fields are optional keyword-only arguments; unset fields stay None.
        super(CreatePipelineJobScheduleDto, self).__init__(**kwargs)
        self.name = name
        self.pipeline_job_name = pipeline_job_name
        self.pipeline_job_runtime_settings = pipeline_job_runtime_settings
        self.display_name = display_name
        self.trigger_type = trigger_type
        self.recurrence = recurrence
        self.cron = cron
        self.status = status
        self.description = description
        self.tags = tags
        self.properties = properties


# Request body for publishing a pipeline (optionally via a pipeline endpoint);
# same passive-DTO shape as the classes above.  (Docstring continues into the
# next chunk.)
class CreatePublishedPipelineRequest(msrest.serialization.Model):
    """CreatePublishedPipelineRequest.

    :ivar use_pipeline_endpoint:
    :vartype use_pipeline_endpoint: bool
    :ivar pipeline_name:
    :vartype pipeline_name: str
    :ivar pipeline_description:
    :vartype pipeline_description: str
    :ivar use_existing_pipeline_endpoint:
    :vartype use_existing_pipeline_endpoint: bool
    :ivar pipeline_endpoint_name:
    :vartype pipeline_endpoint_name: str
    :ivar pipeline_endpoint_description:
    :vartype pipeline_endpoint_description: str
    :ivar set_as_default_pipeline_for_endpoint:
    :vartype set_as_default_pipeline_for_endpoint: bool
    :ivar step_tags: This is a dictionary.
    :vartype step_tags: dict[str, str]
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar pipeline_parameters: This is a dictionary.
    :vartype pipeline_parameters: dict[str, str]
    :ivar data_path_assignments: This is a dictionary.
    :vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
    :ivar data_set_definition_value_assignments: This is a dictionary.
    :vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
    :ivar asset_output_settings_assignments: This is a dictionary.
    :vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
    :ivar enable_notification:
    :vartype enable_notification: bool
    :ivar sub_pipelines_info:
    :vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
    :ivar display_name:
    :vartype display_name: str
    :ivar run_id:
    :vartype run_id: str
    :ivar parent_run_id:
    :vartype parent_run_id: str
    :ivar graph:
    :vartype graph: ~flow.models.GraphDraftEntity
    :ivar pipeline_run_settings:
    :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
    :ivar module_node_run_settings:
    :vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
    :ivar module_node_ui_input_settings:
    :vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar continue_run_on_step_failure:
    :vartype continue_run_on_step_failure: bool
    :ivar description:
    :vartype description: str
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar enforce_rerun:
    :vartype enforce_rerun: bool
    :ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
     "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
     "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
     "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
    :vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
    """

    # Python attribute -> {wire key, msrest type}.  '{X}' is dict-of-X and
    # '[X]' is list-of-X in msrest's type notation.
    _attribute_map = {
        'use_pipeline_endpoint': {'key': 'usePipelineEndpoint', 'type': 'bool'},
        'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
        'pipeline_description': {'key': 'pipelineDescription', 'type': 'str'},
        'use_existing_pipeline_endpoint': {'key': 'useExistingPipelineEndpoint', 'type': 'bool'},
        'pipeline_endpoint_name': {'key': 'pipelineEndpointName', 'type': 'str'},
        'pipeline_endpoint_description': {'key': 'pipelineEndpointDescription', 'type': 'str'},
        'set_as_default_pipeline_for_endpoint': {'key': 'setAsDefaultPipelineForEndpoint', 'type': 'bool'},
        'step_tags': {'key': 'stepTags', 'type': '{str}'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
        'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
        'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
        'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
        'enable_notification': {'key': 'enableNotification', 'type': 'bool'},
        'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
        'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
        'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
        'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
        'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
        'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        use_pipeline_endpoint: Optional[bool] = None,
        pipeline_name: Optional[str] = None,
        pipeline_description: Optional[str] = None,
        use_existing_pipeline_endpoint: Optional[bool] = None,
        pipeline_endpoint_name: Optional[str] = None,
        pipeline_endpoint_description: Optional[str] = None,
        set_as_default_pipeline_for_endpoint: Optional[bool] = None,
        step_tags: Optional[Dict[str, str]] = None,
        experiment_name: Optional[str] = None,
        pipeline_parameters: Optional[Dict[str, str]] = None,
        data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None,
        data_set_definition_value_assignments: Optional[Dict[str, "DataSetDefinitionValue"]] = None,
        asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None,
        enable_notification: Optional[bool] = None,
        sub_pipelines_info: Optional["SubPipelinesInfo"] = None,
        display_name: Optional[str] = None,
        run_id: Optional[str] = None,
        parent_run_id: Optional[str] = None,
        graph: Optional["GraphDraftEntity"] = None,
        pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
        module_node_run_settings: Optional[List["GraphModuleNodeRunSetting"]] = None,
        module_node_ui_input_settings: Optional[List["GraphModuleNodeUIInputSetting"]] = None,
        tags: Optional[Dict[str, str]] = None,
        continue_run_on_step_failure: Optional[bool] = None,
        description: Optional[str] = None,
        properties: Optional[Dict[str, str]] = None,
        enforce_rerun: Optional[bool] = None,
        dataset_access_modes: Optional[Union[str, "DatasetAccessModes"]] = None,
        **kwargs
    ):
        """
        :keyword use_pipeline_endpoint:
        :paramtype use_pipeline_endpoint: bool
        :keyword pipeline_name:
        :paramtype pipeline_name: str
        :keyword pipeline_description:
        :paramtype pipeline_description: str
        :keyword use_existing_pipeline_endpoint:
        :paramtype use_existing_pipeline_endpoint: bool
        :keyword pipeline_endpoint_name:
        :paramtype pipeline_endpoint_name: str
        :keyword pipeline_endpoint_description:
        :paramtype pipeline_endpoint_description: str
        :keyword set_as_default_pipeline_for_endpoint:
        :paramtype set_as_default_pipeline_for_endpoint: bool
        :keyword step_tags: This is a dictionary.
        :paramtype step_tags: dict[str, str]
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword pipeline_parameters: This is a dictionary.
        :paramtype pipeline_parameters: dict[str, str]
        :keyword data_path_assignments: This is a dictionary.
        :paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
        :keyword data_set_definition_value_assignments: This is a dictionary.
        :paramtype data_set_definition_value_assignments: dict[str,
         ~flow.models.DataSetDefinitionValue]
        :keyword asset_output_settings_assignments: This is a dictionary.
        :paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
        :keyword enable_notification:
        :paramtype enable_notification: bool
        :keyword sub_pipelines_info:
        :paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
        :keyword display_name:
        :paramtype display_name: str
        :keyword run_id:
        :paramtype run_id: str
        :keyword parent_run_id:
        :paramtype parent_run_id: str
        :keyword graph:
        :paramtype graph: ~flow.models.GraphDraftEntity
        :keyword pipeline_run_settings:
        :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
        :keyword module_node_run_settings:
        :paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
        :keyword module_node_ui_input_settings:
        :paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword continue_run_on_step_failure:
        :paramtype continue_run_on_step_failure: bool
        :keyword description:
        :paramtype description: str
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword enforce_rerun:
        :paramtype enforce_rerun: bool
        :keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
         "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
         "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
         "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
        :paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
        """
        # Store every optional keyword as an instance attribute, unmodified.
        super(CreatePublishedPipelineRequest, self).__init__(**kwargs)
        self.use_pipeline_endpoint = use_pipeline_endpoint
        self.pipeline_name = pipeline_name
        self.pipeline_description = pipeline_description
        self.use_existing_pipeline_endpoint = use_existing_pipeline_endpoint
        self.pipeline_endpoint_name = pipeline_endpoint_name
        self.pipeline_endpoint_description = pipeline_endpoint_description
        self.set_as_default_pipeline_for_endpoint = set_as_default_pipeline_for_endpoint
        self.step_tags = step_tags
        self.experiment_name = experiment_name
        self.pipeline_parameters = pipeline_parameters
        self.data_path_assignments = data_path_assignments
        self.data_set_definition_value_assignments = data_set_definition_value_assignments
        self.asset_output_settings_assignments = asset_output_settings_assignments
        self.enable_notification = enable_notification
        self.sub_pipelines_info = sub_pipelines_info
        self.display_name = display_name
        self.run_id = run_id
        self.parent_run_id = parent_run_id
        self.graph = graph
        self.pipeline_run_settings = pipeline_run_settings
        self.module_node_run_settings = module_node_run_settings
        self.module_node_ui_input_settings = module_node_ui_input_settings
        self.tags = tags
        self.continue_run_on_step_failure = continue_run_on_step_failure
        self.description = description
        self.properties = properties
        self.enforce_rerun = enforce_rerun
        self.dataset_access_modes = dataset_access_modes


# Request body for creating a real-time endpoint from a pipeline draft/run;
# passive DTO like its siblings.  (Docstring continues into the next chunk.)
class CreateRealTimeEndpointRequest(msrest.serialization.Model):
    """CreateRealTimeEndpointRequest.
    :ivar name:
    :vartype name: str
    :ivar compute_info:
    :vartype compute_info: ~flow.models.ComputeInfo
    :ivar description:
    :vartype description: str
    :ivar linked_pipeline_draft_id:
    :vartype linked_pipeline_draft_id: str
    :ivar linked_pipeline_run_id:
    :vartype linked_pipeline_run_id: str
    :ivar aks_advance_settings:
    :vartype aks_advance_settings: ~flow.models.AKSAdvanceSettings
    :ivar aci_advance_settings:
    :vartype aci_advance_settings: ~flow.models.ACIAdvanceSettings
    :ivar linked_training_pipeline_run_id:
    :vartype linked_training_pipeline_run_id: str
    :ivar linked_experiment_name:
    :vartype linked_experiment_name: str
    :ivar graph_nodes_run_id_mapping: This is a dictionary.
    :vartype graph_nodes_run_id_mapping: dict[str, str]
    :ivar workflow:
    :vartype workflow: ~flow.models.PipelineGraph
    :ivar inputs:
    :vartype inputs: list[~flow.models.InputOutputPortMetadata]
    :ivar outputs:
    :vartype outputs: list[~flow.models.InputOutputPortMetadata]
    :ivar example_request:
    :vartype example_request: ~flow.models.ExampleRequest
    :ivar user_storage_connection_string:
    :vartype user_storage_connection_string: str
    :ivar user_storage_endpoint_uri:
    :vartype user_storage_endpoint_uri: str
    :ivar user_storage_workspace_sai_token:
    :vartype user_storage_workspace_sai_token: str
    :ivar user_storage_container_name:
    :vartype user_storage_container_name: str
    :ivar pipeline_run_id:
    :vartype pipeline_run_id: str
    :ivar root_pipeline_run_id:
    :vartype root_pipeline_run_id: str
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar experiment_id:
    :vartype experiment_id: str
    """

    # Python attribute -> {wire key, msrest type} for (de)serialization.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'compute_info': {'key': 'computeInfo', 'type': 'ComputeInfo'},
        'description': {'key': 'description', 'type': 'str'},
        'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
        'linked_pipeline_run_id': {'key': 'linkedPipelineRunId', 'type': 'str'},
        'aks_advance_settings': {'key': 'aksAdvanceSettings', 'type': 'AKSAdvanceSettings'},
        'aci_advance_settings': {'key': 'aciAdvanceSettings', 'type': 'ACIAdvanceSettings'},
        'linked_training_pipeline_run_id': {'key': 'linkedTrainingPipelineRunId', 'type': 'str'},
        'linked_experiment_name': {'key': 'linkedExperimentName', 'type': 'str'},
        'graph_nodes_run_id_mapping': {'key': 'graphNodesRunIdMapping', 'type': '{str}'},
        'workflow': {'key': 'workflow', 'type': 'PipelineGraph'},
        'inputs': {'key': 'inputs', 'type': '[InputOutputPortMetadata]'},
        'outputs': {'key': 'outputs', 'type': '[InputOutputPortMetadata]'},
        'example_request': {'key': 'exampleRequest', 'type': 'ExampleRequest'},
        'user_storage_connection_string': {'key': 'userStorageConnectionString', 'type': 'str'},
        'user_storage_endpoint_uri': {'key': 'userStorageEndpointUri', 'type': 'str'},
        'user_storage_workspace_sai_token': {'key': 'userStorageWorkspaceSaiToken', 'type': 'str'},
        'user_storage_container_name': {'key': 'userStorageContainerName', 'type': 'str'},
        'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
        'root_pipeline_run_id': {'key': 'rootPipelineRunId', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        compute_info: Optional["ComputeInfo"] = None,
        description: Optional[str] = None,
        linked_pipeline_draft_id: Optional[str] = None,
        linked_pipeline_run_id: Optional[str] = None,
        aks_advance_settings: Optional["AKSAdvanceSettings"] = None,
        aci_advance_settings: Optional["ACIAdvanceSettings"] = None,
        linked_training_pipeline_run_id: Optional[str] = None,
        linked_experiment_name: Optional[str] = None,
        graph_nodes_run_id_mapping: Optional[Dict[str, str]] = None,
        workflow: Optional["PipelineGraph"] = None,
        inputs: Optional[List["InputOutputPortMetadata"]] = None,
        outputs: Optional[List["InputOutputPortMetadata"]] = None,
        example_request: Optional["ExampleRequest"] = None,
        user_storage_connection_string: Optional[str] = None,
        user_storage_endpoint_uri: Optional[str] = None,
        user_storage_workspace_sai_token: Optional[str] = None,
        user_storage_container_name: Optional[str] = None,
        pipeline_run_id: Optional[str] = None,
        root_pipeline_run_id: Optional[str] = None,
        experiment_name: Optional[str] = None,
        experiment_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword compute_info:
        :paramtype compute_info: ~flow.models.ComputeInfo
        :keyword description:
        :paramtype description: str
        :keyword linked_pipeline_draft_id:
        :paramtype linked_pipeline_draft_id: str
        :keyword linked_pipeline_run_id:
        :paramtype linked_pipeline_run_id: str
        :keyword aks_advance_settings:
        :paramtype aks_advance_settings: ~flow.models.AKSAdvanceSettings
        :keyword aci_advance_settings:
        :paramtype aci_advance_settings: ~flow.models.ACIAdvanceSettings
        :keyword linked_training_pipeline_run_id:
        :paramtype linked_training_pipeline_run_id: str
        :keyword linked_experiment_name:
        :paramtype linked_experiment_name: str
        :keyword graph_nodes_run_id_mapping: This is a dictionary.
        :paramtype graph_nodes_run_id_mapping: dict[str, str]
        :keyword workflow:
        :paramtype workflow: ~flow.models.PipelineGraph
        :keyword inputs:
        :paramtype inputs: list[~flow.models.InputOutputPortMetadata]
        :keyword outputs:
        :paramtype outputs: list[~flow.models.InputOutputPortMetadata]
        :keyword example_request:
        :paramtype example_request: ~flow.models.ExampleRequest
        :keyword user_storage_connection_string:
        :paramtype user_storage_connection_string: str
        :keyword user_storage_endpoint_uri:
        :paramtype user_storage_endpoint_uri: str
        :keyword user_storage_workspace_sai_token:
        :paramtype user_storage_workspace_sai_token: str
        :keyword user_storage_container_name:
        :paramtype user_storage_container_name: str
        :keyword pipeline_run_id:
        :paramtype pipeline_run_id: str
        :keyword root_pipeline_run_id:
        :paramtype root_pipeline_run_id: str
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword experiment_id:
        :paramtype experiment_id: str
        """
        # Store every optional keyword as an instance attribute, unmodified.
        super(CreateRealTimeEndpointRequest, self).__init__(**kwargs)
        self.name = name
        self.compute_info = compute_info
        self.description = description
        self.linked_pipeline_draft_id = linked_pipeline_draft_id
        self.linked_pipeline_run_id = linked_pipeline_run_id
        self.aks_advance_settings = aks_advance_settings
        self.aci_advance_settings = aci_advance_settings
        self.linked_training_pipeline_run_id = linked_training_pipeline_run_id
        self.linked_experiment_name = linked_experiment_name
        self.graph_nodes_run_id_mapping = graph_nodes_run_id_mapping
        self.workflow = workflow
        self.inputs = inputs
        self.outputs = outputs
        self.example_request = example_request
        self.user_storage_connection_string = user_storage_connection_string
        self.user_storage_endpoint_uri = user_storage_endpoint_uri
        self.user_storage_workspace_sai_token = user_storage_workspace_sai_token
        self.user_storage_container_name = user_storage_container_name
        self.pipeline_run_id = pipeline_run_id
        self.root_pipeline_run_id = root_pipeline_run_id
        self.experiment_name = experiment_name
        self.experiment_id = experiment_id


# Who/when/where an entity was created.  'createdTime' is serialized by msrest
# as an ISO-8601 timestamp (see _attribute_map).
class CreationContext(msrest.serialization.Model):
    """CreationContext.

    :ivar created_time:
    :vartype created_time: ~datetime.datetime
    :ivar created_by:
    :vartype created_by: ~flow.models.SchemaContractsCreatedBy
    :ivar creation_source:
    :vartype creation_source: str
    """

    _attribute_map = {
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
        'creation_source': {'key': 'creationSource', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        created_time: Optional[datetime.datetime] = None,
        created_by: Optional["SchemaContractsCreatedBy"] = None,
        creation_source: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword created_time:
        :paramtype created_time: ~datetime.datetime
        :keyword created_by:
        :paramtype created_by: ~flow.models.SchemaContractsCreatedBy
        :keyword creation_source:
        :paramtype creation_source: str
        """
        super(CreationContext, self).__init__(**kwargs)
        self.created_time = created_time
        self.created_by = created_by
        self.creation_source = creation_source


# Cron-style schedule trigger.  All fields are plain strings on the wire
# (times are strings, not datetimes — see _attribute_map in the next chunk).
class Cron(msrest.serialization.Model):
    """Cron.
    :ivar expression:
    :vartype expression: str
    :ivar end_time:
    :vartype end_time: str
    :ivar start_time:
    :vartype start_time: str
    :ivar time_zone:
    :vartype time_zone: str
    """

    # All four fields serialize as plain strings.
    _attribute_map = {
        'expression': {'key': 'expression', 'type': 'str'},
        'end_time': {'key': 'endTime', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'str'},
        'time_zone': {'key': 'timeZone', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        expression: Optional[str] = None,
        end_time: Optional[str] = None,
        start_time: Optional[str] = None,
        time_zone: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword expression:
        :paramtype expression: str
        :keyword end_time:
        :paramtype end_time: str
        :keyword start_time:
        :paramtype start_time: str
        :keyword time_zone:
        :paramtype time_zone: str
        """
        super(Cron, self).__init__(**kwargs)
        self.expression = expression
        self.end_time = end_time
        self.start_time = start_time
        self.time_zone = time_zone


# A single config entry of a custom connection: a value plus a flag telling
# whether it is a plain string or a secret.
class CustomConnectionConfig(msrest.serialization.Model):
    """CustomConnectionConfig.

    :ivar config_value_type: Possible values include: "String", "Secret".
    :vartype config_value_type: str or ~flow.models.ConfigValueType
    :ivar value:
    :vartype value: str
    """

    _attribute_map = {
        'config_value_type': {'key': 'configValueType', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        config_value_type: Optional[Union[str, "ConfigValueType"]] = None,
        value: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword config_value_type: Possible values include: "String", "Secret".
        :paramtype config_value_type: str or ~flow.models.ConfigValueType
        :keyword value:
        :paramtype value: str
        """
        super(CustomConnectionConfig, self).__init__(**kwargs)
        self.config_value_type = config_value_type
        self.value = value


# A datastore name plus a relative path within it.
class CustomReference(msrest.serialization.Model):
    """CustomReference.

    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    :ivar relative_path:
    :vartype relative_path: str
    """

    _attribute_map = {
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        aml_data_store_name: Optional[str] = None,
        relative_path: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        :keyword relative_path:
        :paramtype relative_path: str
        """
        super(CustomReference, self).__init__(**kwargs)
        self.aml_data_store_name = aml_data_store_name
        self.relative_path = relative_path


# Describes how a data location is delivered to a compute target (direct,
# mount, download or HDFS) and where it lands.  (Docstring continues below.)
class Data(msrest.serialization.Model):
    """Data.

    :ivar data_location:
    :vartype data_location: ~flow.models.ExecutionDataLocation
    :ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
    :vartype mechanism: str or ~flow.models.DeliveryMechanism
    :ivar environment_variable_name:
    :vartype environment_variable_name: str
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar overwrite:
    :vartype overwrite: bool
    :ivar options: Dictionary of :code:`<string>`.
    :vartype options: dict[str, str]
    """

    _attribute_map = {
        'data_location': {'key': 'dataLocation', 'type': 'ExecutionDataLocation'},
        'mechanism': {'key': 'mechanism', 'type': 'str'},
        'environment_variable_name': {'key': 'environmentVariableName', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'overwrite': {'key': 'overwrite', 'type': 'bool'},
        'options': {'key': 'options', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        data_location: Optional["ExecutionDataLocation"] = None,
        mechanism: Optional[Union[str, "DeliveryMechanism"]] = None,
        environment_variable_name: Optional[str] = None,
        path_on_compute: Optional[str] = None,
        overwrite: Optional[bool] = None,
        options: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword data_location:
        :paramtype data_location: ~flow.models.ExecutionDataLocation
        :keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
        :paramtype mechanism: str or ~flow.models.DeliveryMechanism
        :keyword environment_variable_name:
        :paramtype environment_variable_name: str
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword overwrite:
        :paramtype overwrite: bool
        :keyword options: Dictionary of :code:`<string>`.
        :paramtype options: dict[str, str]
        """
        # Store every optional keyword as an instance attribute, unmodified.
        super(Data, self).__init__(**kwargs)
        self.data_location = data_location
        self.mechanism = mechanism
        self.environment_variable_name = environment_variable_name
        self.path_on_compute = path_on_compute
        self.overwrite = overwrite
        self.options = options


# Output target for database write: connection name plus destination table.
class DatabaseSink(msrest.serialization.Model):
    """DatabaseSink.

    :ivar connection:
    :vartype connection: str
    :ivar table:
    :vartype table: str
    """

    _attribute_map = {
        'connection': {'key': 'connection', 'type': 'str'},
        'table': {'key': 'table', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        connection: Optional[str] = None,
        table: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword connection:
        :paramtype connection: str
        :keyword table:
        :paramtype table: str
        """
        super(DatabaseSink, self).__init__(**kwargs)
        self.connection = connection
        self.table = table


# Input source for database read: either an inline query or a stored
# procedure (name + parameter list) over the named connection.
class DatabaseSource(msrest.serialization.Model):
    """DatabaseSource.

    :ivar connection:
    :vartype connection: str
    :ivar query:
    :vartype query: str
    :ivar stored_procedure_name:
    :vartype stored_procedure_name: str
    :ivar stored_procedure_parameters:
    :vartype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter]
    """

    _attribute_map = {
        'connection': {'key': 'connection', 'type': 'str'},
        'query': {'key': 'query', 'type': 'str'},
        'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'},
        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[StoredProcedureParameter]'},
    }

    def __init__(
        self,
        *,
        connection: Optional[str] = None,
        query: Optional[str] = None,
        stored_procedure_name: Optional[str] = None,
        stored_procedure_parameters: Optional[List["StoredProcedureParameter"]] = None,
        **kwargs
    ):
        """
        :keyword connection:
        :paramtype connection: str
        :keyword query:
        :paramtype query: str
        :keyword stored_procedure_name:
        :paramtype stored_procedure_name: str
        :keyword stored_procedure_parameters:
        :paramtype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter]
        """
        super(DatabaseSource, self).__init__(**kwargs)
        self.connection = connection
        self.query = query
        self.stored_procedure_name = stored_procedure_name
        self.stored_procedure_parameters = stored_procedure_parameters


# Identifies an existing Databricks cluster by id.
class DatabricksComputeInfo(msrest.serialization.Model):
    """DatabricksComputeInfo.
:ivar existing_cluster_id: :vartype existing_cluster_id: str """ _attribute_map = { 'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'}, } def __init__( self, *, existing_cluster_id: Optional[str] = None, **kwargs ): """ :keyword existing_cluster_id: :paramtype existing_cluster_id: str """ super(DatabricksComputeInfo, self).__init__(**kwargs) self.existing_cluster_id = existing_cluster_id class DatabricksConfiguration(msrest.serialization.Model): """DatabricksConfiguration. :ivar workers: :vartype workers: int :ivar minimum_worker_count: :vartype minimum_worker_count: int :ivar max_mum_worker_count: :vartype max_mum_worker_count: int :ivar spark_version: :vartype spark_version: str :ivar node_type_id: :vartype node_type_id: str :ivar spark_conf: Dictionary of :code:`<string>`. :vartype spark_conf: dict[str, str] :ivar spark_env_vars: Dictionary of :code:`<string>`. :vartype spark_env_vars: dict[str, str] :ivar cluster_log_conf_dbfs_path: :vartype cluster_log_conf_dbfs_path: str :ivar dbfs_init_scripts: :vartype dbfs_init_scripts: list[~flow.models.InitScriptInfoDto] :ivar instance_pool_id: :vartype instance_pool_id: str :ivar timeout_seconds: :vartype timeout_seconds: int :ivar notebook_task: :vartype notebook_task: ~flow.models.NoteBookTaskDto :ivar spark_python_task: :vartype spark_python_task: ~flow.models.SparkPythonTaskDto :ivar spark_jar_task: :vartype spark_jar_task: ~flow.models.SparkJarTaskDto :ivar spark_submit_task: :vartype spark_submit_task: ~flow.models.SparkSubmitTaskDto :ivar jar_libraries: :vartype jar_libraries: list[str] :ivar egg_libraries: :vartype egg_libraries: list[str] :ivar whl_libraries: :vartype whl_libraries: list[str] :ivar pypi_libraries: :vartype pypi_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto] :ivar r_cran_libraries: :vartype r_cran_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto] :ivar maven_libraries: :vartype maven_libraries: list[~flow.models.MavenLibraryDto] :ivar libraries: :vartype 
libraries: list[any] :ivar linked_adb_workspace_metadata: :vartype linked_adb_workspace_metadata: ~flow.models.LinkedADBWorkspaceMetadata :ivar databrick_resource_id: :vartype databrick_resource_id: str :ivar auto_scale: :vartype auto_scale: bool """ _attribute_map = { 'workers': {'key': 'workers', 'type': 'int'}, 'minimum_worker_count': {'key': 'minimumWorkerCount', 'type': 'int'}, 'max_mum_worker_count': {'key': 'maxMumWorkerCount', 'type': 'int'}, 'spark_version': {'key': 'sparkVersion', 'type': 'str'}, 'node_type_id': {'key': 'nodeTypeId', 'type': 'str'}, 'spark_conf': {'key': 'sparkConf', 'type': '{str}'}, 'spark_env_vars': {'key': 'sparkEnvVars', 'type': '{str}'}, 'cluster_log_conf_dbfs_path': {'key': 'clusterLogConfDbfsPath', 'type': 'str'}, 'dbfs_init_scripts': {'key': 'dbfsInitScripts', 'type': '[InitScriptInfoDto]'}, 'instance_pool_id': {'key': 'instancePoolId', 'type': 'str'}, 'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'}, 'notebook_task': {'key': 'notebookTask', 'type': 'NoteBookTaskDto'}, 'spark_python_task': {'key': 'sparkPythonTask', 'type': 'SparkPythonTaskDto'}, 'spark_jar_task': {'key': 'sparkJarTask', 'type': 'SparkJarTaskDto'}, 'spark_submit_task': {'key': 'sparkSubmitTask', 'type': 'SparkSubmitTaskDto'}, 'jar_libraries': {'key': 'jarLibraries', 'type': '[str]'}, 'egg_libraries': {'key': 'eggLibraries', 'type': '[str]'}, 'whl_libraries': {'key': 'whlLibraries', 'type': '[str]'}, 'pypi_libraries': {'key': 'pypiLibraries', 'type': '[PythonPyPiOrRCranLibraryDto]'}, 'r_cran_libraries': {'key': 'rCranLibraries', 'type': '[PythonPyPiOrRCranLibraryDto]'}, 'maven_libraries': {'key': 'mavenLibraries', 'type': '[MavenLibraryDto]'}, 'libraries': {'key': 'libraries', 'type': '[object]'}, 'linked_adb_workspace_metadata': {'key': 'linkedADBWorkspaceMetadata', 'type': 'LinkedADBWorkspaceMetadata'}, 'databrick_resource_id': {'key': 'databrickResourceId', 'type': 'str'}, 'auto_scale': {'key': 'autoScale', 'type': 'bool'}, } def __init__( self, *, 
workers: Optional[int] = None, minimum_worker_count: Optional[int] = None, max_mum_worker_count: Optional[int] = None, spark_version: Optional[str] = None, node_type_id: Optional[str] = None, spark_conf: Optional[Dict[str, str]] = None, spark_env_vars: Optional[Dict[str, str]] = None, cluster_log_conf_dbfs_path: Optional[str] = None, dbfs_init_scripts: Optional[List["InitScriptInfoDto"]] = None, instance_pool_id: Optional[str] = None, timeout_seconds: Optional[int] = None, notebook_task: Optional["NoteBookTaskDto"] = None, spark_python_task: Optional["SparkPythonTaskDto"] = None, spark_jar_task: Optional["SparkJarTaskDto"] = None, spark_submit_task: Optional["SparkSubmitTaskDto"] = None, jar_libraries: Optional[List[str]] = None, egg_libraries: Optional[List[str]] = None, whl_libraries: Optional[List[str]] = None, pypi_libraries: Optional[List["PythonPyPiOrRCranLibraryDto"]] = None, r_cran_libraries: Optional[List["PythonPyPiOrRCranLibraryDto"]] = None, maven_libraries: Optional[List["MavenLibraryDto"]] = None, libraries: Optional[List[Any]] = None, linked_adb_workspace_metadata: Optional["LinkedADBWorkspaceMetadata"] = None, databrick_resource_id: Optional[str] = None, auto_scale: Optional[bool] = None, **kwargs ): """ :keyword workers: :paramtype workers: int :keyword minimum_worker_count: :paramtype minimum_worker_count: int :keyword max_mum_worker_count: :paramtype max_mum_worker_count: int :keyword spark_version: :paramtype spark_version: str :keyword node_type_id: :paramtype node_type_id: str :keyword spark_conf: Dictionary of :code:`<string>`. :paramtype spark_conf: dict[str, str] :keyword spark_env_vars: Dictionary of :code:`<string>`. 
:paramtype spark_env_vars: dict[str, str] :keyword cluster_log_conf_dbfs_path: :paramtype cluster_log_conf_dbfs_path: str :keyword dbfs_init_scripts: :paramtype dbfs_init_scripts: list[~flow.models.InitScriptInfoDto] :keyword instance_pool_id: :paramtype instance_pool_id: str :keyword timeout_seconds: :paramtype timeout_seconds: int :keyword notebook_task: :paramtype notebook_task: ~flow.models.NoteBookTaskDto :keyword spark_python_task: :paramtype spark_python_task: ~flow.models.SparkPythonTaskDto :keyword spark_jar_task: :paramtype spark_jar_task: ~flow.models.SparkJarTaskDto :keyword spark_submit_task: :paramtype spark_submit_task: ~flow.models.SparkSubmitTaskDto :keyword jar_libraries: :paramtype jar_libraries: list[str] :keyword egg_libraries: :paramtype egg_libraries: list[str] :keyword whl_libraries: :paramtype whl_libraries: list[str] :keyword pypi_libraries: :paramtype pypi_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto] :keyword r_cran_libraries: :paramtype r_cran_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto] :keyword maven_libraries: :paramtype maven_libraries: list[~flow.models.MavenLibraryDto] :keyword libraries: :paramtype libraries: list[any] :keyword linked_adb_workspace_metadata: :paramtype linked_adb_workspace_metadata: ~flow.models.LinkedADBWorkspaceMetadata :keyword databrick_resource_id: :paramtype databrick_resource_id: str :keyword auto_scale: :paramtype auto_scale: bool """ super(DatabricksConfiguration, self).__init__(**kwargs) self.workers = workers self.minimum_worker_count = minimum_worker_count self.max_mum_worker_count = max_mum_worker_count self.spark_version = spark_version self.node_type_id = node_type_id self.spark_conf = spark_conf self.spark_env_vars = spark_env_vars self.cluster_log_conf_dbfs_path = cluster_log_conf_dbfs_path self.dbfs_init_scripts = dbfs_init_scripts self.instance_pool_id = instance_pool_id self.timeout_seconds = timeout_seconds self.notebook_task = notebook_task self.spark_python_task = 
spark_python_task
        self.spark_jar_task = spark_jar_task
        self.spark_submit_task = spark_submit_task
        self.jar_libraries = jar_libraries
        self.egg_libraries = egg_libraries
        self.whl_libraries = whl_libraries
        self.pypi_libraries = pypi_libraries
        self.r_cran_libraries = r_cran_libraries
        self.maven_libraries = maven_libraries
        self.libraries = libraries
        self.linked_adb_workspace_metadata = linked_adb_workspace_metadata
        self.databrick_resource_id = databrick_resource_id
        self.auto_scale = auto_scale


class DatacacheConfiguration(msrest.serialization.Model):
    """DatacacheConfiguration.

    :ivar datacache_id:
    :vartype datacache_id: str
    :ivar datacache_store:
    :vartype datacache_store: str
    :ivar dataset_id:
    :vartype dataset_id: str
    :ivar mode: The only acceptable values to pass in are None and "Mount". The default value is
     None.
    :vartype mode: str
    :ivar replica:
    :vartype replica: int
    :ivar failure_fallback:
    :vartype failure_fallback: bool
    :ivar path_on_compute:
    :vartype path_on_compute: str
    """

    # Attribute name -> REST wire key / msrest type, used by msrest's Model (de)serialization.
    _attribute_map = {
        'datacache_id': {'key': 'datacacheId', 'type': 'str'},
        'datacache_store': {'key': 'datacacheStore', 'type': 'str'},
        'dataset_id': {'key': 'datasetId', 'type': 'str'},
        'mode': {'key': 'mode', 'type': 'str'},
        'replica': {'key': 'replica', 'type': 'int'},
        'failure_fallback': {'key': 'failureFallback', 'type': 'bool'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        datacache_id: Optional[str] = None,
        datacache_store: Optional[str] = None,
        dataset_id: Optional[str] = None,
        mode: Optional[str] = None,  # per the generated contract, only None or "Mount" are accepted
        replica: Optional[int] = None,
        failure_fallback: Optional[bool] = None,
        path_on_compute: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword datacache_id:
        :paramtype datacache_id: str
        :keyword datacache_store:
        :paramtype datacache_store: str
        :keyword dataset_id:
        :paramtype dataset_id: str
        :keyword mode: The only acceptable values to pass in are None and "Mount". The default
         value is None.
        :paramtype mode: str
        :keyword replica:
        :paramtype replica: int
        :keyword failure_fallback:
        :paramtype failure_fallback: bool
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        """
        super(DatacacheConfiguration, self).__init__(**kwargs)
        self.datacache_id = datacache_id
        self.datacache_store = datacache_store
        self.dataset_id = dataset_id
        self.mode = mode
        self.replica = replica
        self.failure_fallback = failure_fallback
        self.path_on_compute = path_on_compute


class DataInfo(msrest.serialization.Model):
    """DataInfo.

    :ivar feed_name:
    :vartype feed_name: str
    :ivar id:
    :vartype id: str
    :ivar data_source_type: Possible values include: "None", "PipelineDataSource", "AmlDataset",
     "GlobalDataset", "FeedModel", "FeedDataset", "AmlDataVersion", "AMLModelVersion".
    :vartype data_source_type: str or ~flow.models.DataSourceType
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar data_type_id:
    :vartype data_type_id: str
    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar modified_date:
    :vartype modified_date: ~datetime.datetime
    :ivar registered_by:
    :vartype registered_by: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar created_by_studio:
    :vartype created_by_studio: bool
    :ivar data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
     "AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
     "AzureMySqlDatabase", "Custom", "Hdfs".
    :vartype data_reference_type: str or ~flow.models.DataReferenceType
    :ivar dataset_type:
    :vartype dataset_type: str
    :ivar saved_dataset_id:
    :vartype saved_dataset_id: str
    :ivar dataset_version_id:
    :vartype dataset_version_id: str
    :ivar is_visible:
    :vartype is_visible: bool
    :ivar is_registered:
    :vartype is_registered: bool
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, any]
    :ivar connection_string:
    :vartype connection_string: str
    :ivar container_name:
    :vartype container_name: str
    :ivar data_storage_endpoint_uri:
    :vartype data_storage_endpoint_uri: str
    :ivar workspace_sai_token:
    :vartype workspace_sai_token: str
    :ivar aml_dataset_data_flow:
    :vartype aml_dataset_data_flow: str
    :ivar system_data:
    :vartype system_data: ~flow.models.SystemData
    :ivar arm_id:
    :vartype arm_id: str
    :ivar asset_id:
    :vartype asset_id: str
    :ivar asset_uri:
    :vartype asset_uri: str
    :ivar asset_type:
    :vartype asset_type: str
    :ivar is_data_v2:
    :vartype is_data_v2: bool
    :ivar asset_scope_type: Possible values include: "Workspace", "Global", "All", "Feed".
    :vartype asset_scope_type: str or ~flow.models.AssetScopeTypes
    :ivar pipeline_run_id:
    :vartype pipeline_run_id: str
    :ivar module_node_id:
    :vartype module_node_id: str
    :ivar output_port_name:
    :vartype output_port_name: str
    """

    # Attribute name -> REST wire key / msrest type, used by msrest's Model (de)serialization.
    _attribute_map = {
        'feed_name': {'key': 'feedName', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'modified_date': {'key': 'modifiedDate', 'type': 'iso-8601'},
        'registered_by': {'key': 'registeredBy', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'created_by_studio': {'key': 'createdByStudio', 'type': 'bool'},
        'data_reference_type': {'key': 'dataReferenceType', 'type': 'str'},
        'dataset_type': {'key': 'datasetType', 'type': 'str'},
        'saved_dataset_id': {'key': 'savedDatasetId', 'type': 'str'},
        'dataset_version_id': {'key': 'datasetVersionId', 'type': 'str'},
        'is_visible': {'key': 'isVisible', 'type': 'bool'},
        'is_registered': {'key': 'isRegistered', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': '{object}'},
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'container_name': {'key': 'containerName', 'type': 'str'},
        'data_storage_endpoint_uri': {'key': 'dataStorageEndpointUri', 'type': 'str'},
        'workspace_sai_token': {'key': 'workspaceSaiToken', 'type': 'str'},
        'aml_dataset_data_flow': {'key': 'amlDatasetDataFlow', 'type': 'str'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'arm_id': {'key': 'armId', 'type': 'str'},
        'asset_id': {'key': 'assetId', 'type': 'str'},
        'asset_uri': {'key': 'assetUri', 'type': 'str'},
        'asset_type': {'key': 'assetType', 'type': 'str'},
        'is_data_v2': {'key': 'isDataV2', 'type': 'bool'},
        'asset_scope_type': {'key': 'assetScopeType', 'type': 'str'},
        'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
        'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
        'output_port_name': {'key': 'outputPortName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        feed_name: Optional[str] = None,
        id: Optional[str] = None,  # NOTE(review): shadows builtin `id`; kept — generated wire contract
        data_source_type: Optional[Union[str, "DataSourceType"]] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        data_type_id: Optional[str] = None,
        aml_data_store_name: Optional[str] = None,
        relative_path: Optional[str] = None,
        created_date: Optional[datetime.datetime] = None,
        modified_date: Optional[datetime.datetime] = None,
        registered_by: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        created_by_studio: Optional[bool] = None,
        data_reference_type: Optional[Union[str, "DataReferenceType"]] = None,
        dataset_type: Optional[str] = None,
        saved_dataset_id: Optional[str] = None,
        dataset_version_id: Optional[str] = None,
        is_visible: Optional[bool] = None,
        is_registered: Optional[bool] = None,
        properties: Optional[Dict[str, Any]] = None,
        connection_string: Optional[str] = None,
        container_name: Optional[str] = None,
        data_storage_endpoint_uri: Optional[str] = None,
        workspace_sai_token: Optional[str] = None,
        aml_dataset_data_flow: Optional[str] = None,
        system_data: Optional["SystemData"] = None,
        arm_id: Optional[str] = None,
        asset_id: Optional[str] = None,
        asset_uri: Optional[str] = None,
        asset_type: Optional[str] = None,
        is_data_v2: Optional[bool] = None,
        asset_scope_type: Optional[Union[str, "AssetScopeTypes"]] = None,
        pipeline_run_id: Optional[str] = None,
        module_node_id: Optional[str] = None,
        output_port_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword feed_name:
        :paramtype feed_name: str
        :keyword id:
        :paramtype id: str
        :keyword data_source_type: Possible values include: "None", "PipelineDataSource",
         "AmlDataset", "GlobalDataset", "FeedModel", "FeedDataset", "AmlDataVersion",
         "AMLModelVersion".
        :paramtype data_source_type: str or ~flow.models.DataSourceType
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword data_type_id:
        :paramtype data_type_id: str
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword modified_date:
        :paramtype modified_date: ~datetime.datetime
        :keyword registered_by:
        :paramtype registered_by: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword created_by_studio:
        :paramtype created_by_studio: bool
        :keyword data_reference_type: Possible values include: "None", "AzureBlob",
         "AzureDataLake", "AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase",
         "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase", "Custom", "Hdfs".
        :paramtype data_reference_type: str or ~flow.models.DataReferenceType
        :keyword dataset_type:
        :paramtype dataset_type: str
        :keyword saved_dataset_id:
        :paramtype saved_dataset_id: str
        :keyword dataset_version_id:
        :paramtype dataset_version_id: str
        :keyword is_visible:
        :paramtype is_visible: bool
        :keyword is_registered:
        :paramtype is_registered: bool
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, any]
        :keyword connection_string:
        :paramtype connection_string: str
        :keyword container_name:
        :paramtype container_name: str
        :keyword data_storage_endpoint_uri:
        :paramtype data_storage_endpoint_uri: str
        :keyword workspace_sai_token:
        :paramtype workspace_sai_token: str
        :keyword aml_dataset_data_flow:
        :paramtype aml_dataset_data_flow: str
        :keyword system_data:
        :paramtype system_data: ~flow.models.SystemData
        :keyword arm_id:
        :paramtype arm_id: str
        :keyword asset_id:
        :paramtype asset_id: str
        :keyword asset_uri:
        :paramtype asset_uri: str
        :keyword asset_type:
        :paramtype asset_type: str
        :keyword is_data_v2:
        :paramtype is_data_v2: bool
        :keyword asset_scope_type: Possible values include: "Workspace", "Global", "All", "Feed".
        :paramtype asset_scope_type: str or ~flow.models.AssetScopeTypes
        :keyword pipeline_run_id:
        :paramtype pipeline_run_id: str
        :keyword module_node_id:
        :paramtype module_node_id: str
        :keyword output_port_name:
        :paramtype output_port_name: str
        """
        super(DataInfo, self).__init__(**kwargs)
        self.feed_name = feed_name
        self.id = id
        self.data_source_type = data_source_type
        self.name = name
        self.description = description
        self.data_type_id = data_type_id
        self.aml_data_store_name = aml_data_store_name
        self.relative_path = relative_path
        self.created_date = created_date
        self.modified_date = modified_date
        self.registered_by = registered_by
        self.tags = tags
        self.created_by_studio = created_by_studio
        self.data_reference_type = data_reference_type
        self.dataset_type = dataset_type
        self.saved_dataset_id = saved_dataset_id
        self.dataset_version_id = dataset_version_id
        self.is_visible = is_visible
        self.is_registered = is_registered
        self.properties = properties
        self.connection_string = connection_string
        self.container_name = container_name
        self.data_storage_endpoint_uri = data_storage_endpoint_uri
        self.workspace_sai_token = workspace_sai_token
        self.aml_dataset_data_flow = aml_dataset_data_flow
        self.system_data = system_data
        self.arm_id = arm_id
        self.asset_id = asset_id
        self.asset_uri = asset_uri
        self.asset_type = asset_type
        self.is_data_v2 = is_data_v2
        self.asset_scope_type = asset_scope_type
        self.pipeline_run_id = pipeline_run_id
        self.module_node_id = module_node_id
        self.output_port_name = output_port_name


class DataLocation(msrest.serialization.Model):
    """DataLocation.

    :ivar storage_type: Possible values include: "None", "AzureBlob", "Artifact", "Snapshot",
     "SavedAmlDataset", "Asset".
    :vartype storage_type: str or ~flow.models.DataLocationStorageType
    :ivar storage_id:
    :vartype storage_id: str
    :ivar uri:
    :vartype uri: str
    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar data_reference:
    :vartype data_reference: ~flow.models.DataReference
    :ivar aml_dataset:
    :vartype aml_dataset: ~flow.models.AmlDataset
    :ivar asset_definition:
    :vartype asset_definition: ~flow.models.AssetDefinition
    """

    # Attribute name -> REST wire key / msrest type, used by msrest's Model (de)serialization.
    _attribute_map = {
        'storage_type': {'key': 'storageType', 'type': 'str'},
        'storage_id': {'key': 'storageId', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'data_reference': {'key': 'dataReference', 'type': 'DataReference'},
        'aml_dataset': {'key': 'amlDataset', 'type': 'AmlDataset'},
        'asset_definition': {'key': 'assetDefinition', 'type': 'AssetDefinition'},
    }

    def __init__(
        self,
        *,
        storage_type: Optional[Union[str, "DataLocationStorageType"]] = None,
        storage_id: Optional[str] = None,
        uri: Optional[str] = None,
        data_store_name: Optional[str] = None,
        data_reference: Optional["DataReference"] = None,
        aml_dataset: Optional["AmlDataset"] = None,
        asset_definition: Optional["AssetDefinition"] = None,
        **kwargs
    ):
        """
        :keyword storage_type: Possible values include: "None", "AzureBlob", "Artifact",
         "Snapshot", "SavedAmlDataset", "Asset".
        :paramtype storage_type: str or ~flow.models.DataLocationStorageType
        :keyword storage_id:
        :paramtype storage_id: str
        :keyword uri:
        :paramtype uri: str
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword data_reference:
        :paramtype data_reference: ~flow.models.DataReference
        :keyword aml_dataset:
        :paramtype aml_dataset: ~flow.models.AmlDataset
        :keyword asset_definition:
        :paramtype asset_definition: ~flow.models.AssetDefinition
        """
        super(DataLocation, self).__init__(**kwargs)
        self.storage_type = storage_type
        self.storage_id = storage_id
        self.uri = uri
        self.data_store_name = data_store_name
        self.data_reference = data_reference
        self.aml_dataset = aml_dataset
        self.asset_definition = asset_definition


class DataPath(msrest.serialization.Model):
    """DataPath.

    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar sql_data_path:
    :vartype sql_data_path: ~flow.models.SqlDataPath
    """

    # Attribute name -> REST wire key / msrest type, used by msrest's Model (de)serialization.
    _attribute_map = {
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'sql_data_path': {'key': 'sqlDataPath', 'type': 'SqlDataPath'},
    }

    def __init__(
        self,
        *,
        data_store_name: Optional[str] = None,
        relative_path: Optional[str] = None,
        sql_data_path: Optional["SqlDataPath"] = None,
        **kwargs
    ):
        """
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword sql_data_path:
        :paramtype sql_data_path: ~flow.models.SqlDataPath
        """
        super(DataPath, self).__init__(**kwargs)
        self.data_store_name = data_store_name
        self.relative_path = relative_path
        self.sql_data_path = sql_data_path


class DataPathParameter(msrest.serialization.Model):
    """DataPathParameter.

    :ivar name:
    :vartype name: str
    :ivar documentation:
    :vartype documentation: str
    :ivar default_value:
    :vartype default_value: ~flow.models.LegacyDataPath
    :ivar is_optional:
    :vartype is_optional: bool
    :ivar data_type_id:
    :vartype data_type_id: str
    """

    # Attribute name -> REST wire key / msrest type, used by msrest's Model (de)serialization.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'documentation': {'key': 'documentation', 'type': 'str'},
        'default_value': {'key': 'defaultValue', 'type': 'LegacyDataPath'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
        'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        documentation: Optional[str] = None,
        default_value: Optional["LegacyDataPath"] = None,
        is_optional: Optional[bool] = None,
        data_type_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword documentation:
        :paramtype documentation: str
        :keyword default_value:
        :paramtype default_value: ~flow.models.LegacyDataPath
        :keyword is_optional:
        :paramtype is_optional: bool
        :keyword data_type_id:
        :paramtype data_type_id: str
        """
        super(DataPathParameter, self).__init__(**kwargs)
        self.name = name
        self.documentation = documentation
        self.default_value = default_value
        self.is_optional = is_optional
        self.data_type_id = data_type_id


class DataPortDto(msrest.serialization.Model):
    """DataPortDto.

    :ivar data_port_type: Possible values include: "Input", "Output".
    :vartype data_port_type: str or ~flow.models.DataPortType
    :ivar data_port_name:
    :vartype data_port_name: str
    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar data_store_intellectual_property_access_mode: Possible values include: "ReadOnly",
     "ReadWrite".
    :vartype data_store_intellectual_property_access_mode: str or
     ~flow.models.IntellectualPropertyAccessMode
    :ivar data_store_intellectual_property_publisher:
    :vartype data_store_intellectual_property_publisher: str
    """

    # Attribute name -> REST wire key / msrest type, used by msrest's Model (de)serialization.
    _attribute_map = {
        'data_port_type': {'key': 'dataPortType', 'type': 'str'},
        'data_port_name': {'key': 'dataPortName', 'type': 'str'},
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'data_store_intellectual_property_access_mode': {'key': 'dataStoreIntellectualPropertyAccessMode', 'type': 'str'},
        'data_store_intellectual_property_publisher': {'key': 'dataStoreIntellectualPropertyPublisher', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        data_port_type: Optional[Union[str, "DataPortType"]] = None,
        data_port_name: Optional[str] = None,
        data_store_name: Optional[str] = None,
        data_store_intellectual_property_access_mode: Optional[Union[str, "IntellectualPropertyAccessMode"]] = None,
        data_store_intellectual_property_publisher: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword data_port_type: Possible values include: "Input", "Output".
        :paramtype data_port_type: str or ~flow.models.DataPortType
        :keyword data_port_name:
        :paramtype data_port_name: str
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword data_store_intellectual_property_access_mode: Possible values include:
         "ReadOnly", "ReadWrite".
        :paramtype data_store_intellectual_property_access_mode: str or
         ~flow.models.IntellectualPropertyAccessMode
        :keyword data_store_intellectual_property_publisher:
        :paramtype data_store_intellectual_property_publisher: str
        """
        super(DataPortDto, self).__init__(**kwargs)
        self.data_port_type = data_port_type
        self.data_port_name = data_port_name
        self.data_store_name = data_store_name
        self.data_store_intellectual_property_access_mode = data_store_intellectual_property_access_mode
        self.data_store_intellectual_property_publisher = data_store_intellectual_property_publisher


class DataReference(msrest.serialization.Model):
    """DataReference.

    :ivar type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles",
     "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
     "AzureMySqlDatabase", "Custom", "Hdfs".
    :vartype type: str or ~flow.models.DataReferenceType
    :ivar azure_blob_reference:
    :vartype azure_blob_reference: ~flow.models.AzureBlobReference
    :ivar azure_data_lake_reference:
    :vartype azure_data_lake_reference: ~flow.models.AzureDataLakeReference
    :ivar azure_files_reference:
    :vartype azure_files_reference: ~flow.models.AzureFilesReference
    :ivar azure_sql_database_reference:
    :vartype azure_sql_database_reference: ~flow.models.AzureDatabaseReference
    :ivar azure_postgres_database_reference:
    :vartype azure_postgres_database_reference: ~flow.models.AzureDatabaseReference
    :ivar azure_data_lake_gen2_reference:
    :vartype azure_data_lake_gen2_reference: ~flow.models.AzureDataLakeGen2Reference
    :ivar dbfs_reference:
    :vartype dbfs_reference: ~flow.models.DBFSReference
    :ivar azure_my_sql_database_reference:
    :vartype azure_my_sql_database_reference: ~flow.models.AzureDatabaseReference
    :ivar custom_reference:
    :vartype custom_reference: ~flow.models.CustomReference
    :ivar hdfs_reference:
    :vartype hdfs_reference: ~flow.models.HdfsReference
    """

    # Attribute name -> REST wire key / msrest type, used by msrest's Model (de)serialization.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'azure_blob_reference': {'key': 'azureBlobReference', 'type': 'AzureBlobReference'},
        'azure_data_lake_reference': {'key': 'azureDataLakeReference', 'type': 'AzureDataLakeReference'},
        'azure_files_reference': {'key': 'azureFilesReference', 'type': 'AzureFilesReference'},
        'azure_sql_database_reference': {'key': 'azureSqlDatabaseReference', 'type': 'AzureDatabaseReference'},
        'azure_postgres_database_reference': {'key': 'azurePostgresDatabaseReference', 'type': 'AzureDatabaseReference'},
        'azure_data_lake_gen2_reference': {'key': 'azureDataLakeGen2Reference', 'type': 'AzureDataLakeGen2Reference'},
        'dbfs_reference': {'key': 'dbfsReference', 'type': 'DBFSReference'},
        'azure_my_sql_database_reference': {'key': 'azureMySqlDatabaseReference', 'type': 'AzureDatabaseReference'},
        'custom_reference': {'key': 'customReference', 'type': 'CustomReference'},
        'hdfs_reference': {'key': 'hdfsReference', 'type': 'HdfsReference'},
    }

    def __init__(
        self,
        *,
        type: Optional[Union[str, "DataReferenceType"]] = None,  # NOTE(review): shadows builtin `type`; kept — generated wire contract
        azure_blob_reference: Optional["AzureBlobReference"] = None,
        azure_data_lake_reference: Optional["AzureDataLakeReference"] = None,
        azure_files_reference: Optional["AzureFilesReference"] = None,
        azure_sql_database_reference: Optional["AzureDatabaseReference"] = None,
        azure_postgres_database_reference: Optional["AzureDatabaseReference"] = None,
        azure_data_lake_gen2_reference: Optional["AzureDataLakeGen2Reference"] = None,
        dbfs_reference: Optional["DBFSReference"] = None,
        azure_my_sql_database_reference: Optional["AzureDatabaseReference"] = None,
        custom_reference: Optional["CustomReference"] = None,
        hdfs_reference: Optional["HdfsReference"] = None,
        **kwargs
    ):
        """
        :keyword type: Possible values include: "None", "AzureBlob", "AzureDataLake",
         "AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
         "AzureMySqlDatabase", "Custom", "Hdfs".
        :paramtype type: str or ~flow.models.DataReferenceType
        :keyword azure_blob_reference:
        :paramtype azure_blob_reference: ~flow.models.AzureBlobReference
        :keyword azure_data_lake_reference:
        :paramtype azure_data_lake_reference: ~flow.models.AzureDataLakeReference
        :keyword azure_files_reference:
        :paramtype azure_files_reference: ~flow.models.AzureFilesReference
        :keyword azure_sql_database_reference:
        :paramtype azure_sql_database_reference: ~flow.models.AzureDatabaseReference
        :keyword azure_postgres_database_reference:
        :paramtype azure_postgres_database_reference: ~flow.models.AzureDatabaseReference
        :keyword azure_data_lake_gen2_reference:
        :paramtype azure_data_lake_gen2_reference: ~flow.models.AzureDataLakeGen2Reference
        :keyword dbfs_reference:
        :paramtype dbfs_reference: ~flow.models.DBFSReference
        :keyword azure_my_sql_database_reference:
        :paramtype azure_my_sql_database_reference: ~flow.models.AzureDatabaseReference
        :keyword custom_reference:
        :paramtype custom_reference: ~flow.models.CustomReference
        :keyword hdfs_reference:
        :paramtype hdfs_reference: ~flow.models.HdfsReference
        """
        super(DataReference, self).__init__(**kwargs)
        self.type = type
        self.azure_blob_reference = azure_blob_reference
        self.azure_data_lake_reference = azure_data_lake_reference
        self.azure_files_reference = azure_files_reference
        self.azure_sql_database_reference = azure_sql_database_reference
        self.azure_postgres_database_reference = azure_postgres_database_reference
        self.azure_data_lake_gen2_reference = azure_data_lake_gen2_reference
        self.dbfs_reference = dbfs_reference
        self.azure_my_sql_database_reference = azure_my_sql_database_reference
        self.custom_reference = custom_reference
        self.hdfs_reference = hdfs_reference


class DataReferenceConfiguration(msrest.serialization.Model):
    """DataReferenceConfiguration.

    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar mode: Possible values include: "Mount", "Download", "Upload".
    :vartype mode: str or ~flow.models.DataStoreMode
    :ivar path_on_data_store:
    :vartype path_on_data_store: str
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar overwrite:
    :vartype overwrite: bool
    """

    # Attribute name -> REST wire key / msrest type, used by msrest's Model (de)serialization.
    _attribute_map = {
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'mode': {'key': 'mode', 'type': 'str'},
        'path_on_data_store': {'key': 'pathOnDataStore', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'overwrite': {'key': 'overwrite', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        data_store_name: Optional[str] = None,
        mode: Optional[Union[str, "DataStoreMode"]] = None,
        path_on_data_store: Optional[str] = None,
        path_on_compute: Optional[str] = None,
        overwrite: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword mode: Possible values include: "Mount", "Download", "Upload".
        :paramtype mode: str or ~flow.models.DataStoreMode
        :keyword path_on_data_store:
        :paramtype path_on_data_store: str
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword overwrite:
        :paramtype overwrite: bool
        """
        super(DataReferenceConfiguration, self).__init__(**kwargs)
        self.data_store_name = data_store_name
        self.mode = mode
        self.path_on_data_store = path_on_data_store
        self.path_on_compute = path_on_compute
        self.overwrite = overwrite


class DataSetDefinition(msrest.serialization.Model):
    """DataSetDefinition.

    :ivar data_type_short_name:
    :vartype data_type_short_name: str
    :ivar parameter_name:
    :vartype parameter_name: str
    :ivar value:
    :vartype value: ~flow.models.DataSetDefinitionValue
    """

    # Attribute name -> REST wire key / msrest type, used by msrest's Model (de)serialization.
    _attribute_map = {
        'data_type_short_name': {'key': 'dataTypeShortName', 'type': 'str'},
        'parameter_name': {'key': 'parameterName', 'type': 'str'},
        'value': {'key': 'value', 'type': 'DataSetDefinitionValue'},
    }

    def __init__(
        self,
        *,
        data_type_short_name: Optional[str] = None,
        parameter_name: Optional[str] = None,
        value: Optional["DataSetDefinitionValue"] = None,
        **kwargs
    ):
        """
        :keyword data_type_short_name:
        :paramtype data_type_short_name: str
        :keyword parameter_name:
        :paramtype parameter_name: str
        :keyword value:
        :paramtype value: ~flow.models.DataSetDefinitionValue
        """
        super(DataSetDefinition, self).__init__(**kwargs)
        self.data_type_short_name = data_type_short_name
        self.parameter_name = parameter_name
        self.value = value


class DataSetDefinitionValue(msrest.serialization.Model):
    """DataSetDefinitionValue.

    :ivar literal_value:
    :vartype literal_value: ~flow.models.DataPath
    :ivar data_set_reference:
    :vartype data_set_reference: ~flow.models.RegisteredDataSetReference
    :ivar saved_data_set_reference:
    :vartype saved_data_set_reference: ~flow.models.SavedDataSetReference
    :ivar asset_definition:
    :vartype asset_definition: ~flow.models.AssetDefinition
    """

    # Attribute name -> REST wire key / msrest type, used by msrest's Model (de)serialization.
    _attribute_map = {
        'literal_value': {'key': 'literalValue', 'type': 'DataPath'},
        'data_set_reference': {'key': 'dataSetReference', 'type': 'RegisteredDataSetReference'},
        'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'SavedDataSetReference'},
        'asset_definition': {'key': 'assetDefinition', 'type': 'AssetDefinition'},
    }

    def __init__(
        self,
        *,
        literal_value: Optional["DataPath"] = None,
        data_set_reference: Optional["RegisteredDataSetReference"] = None,
        saved_data_set_reference: Optional["SavedDataSetReference"] = None,
        asset_definition: Optional["AssetDefinition"] = None,
        **kwargs
    ):
        """
        :keyword literal_value:
        :paramtype literal_value: ~flow.models.DataPath
        :keyword data_set_reference:
        :paramtype data_set_reference: ~flow.models.RegisteredDataSetReference
        :keyword saved_data_set_reference:
        :paramtype saved_data_set_reference: ~flow.models.SavedDataSetReference
        :keyword asset_definition:
        :paramtype asset_definition: ~flow.models.AssetDefinition
        """
        super(DataSetDefinitionValue, self).__init__(**kwargs)
        self.literal_value = literal_value
        self.data_set_reference = data_set_reference
        self.saved_data_set_reference = saved_data_set_reference
        self.asset_definition = asset_definition


class DatasetIdentifier(msrest.serialization.Model):
    """DatasetIdentifier.
:ivar saved_id: :vartype saved_id: str :ivar registered_id: :vartype registered_id: str :ivar registered_version: :vartype registered_version: str """ _attribute_map = { 'saved_id': {'key': 'savedId', 'type': 'str'}, 'registered_id': {'key': 'registeredId', 'type': 'str'}, 'registered_version': {'key': 'registeredVersion', 'type': 'str'}, } def __init__( self, *, saved_id: Optional[str] = None, registered_id: Optional[str] = None, registered_version: Optional[str] = None, **kwargs ): """ :keyword saved_id: :paramtype saved_id: str :keyword registered_id: :paramtype registered_id: str :keyword registered_version: :paramtype registered_version: str """ super(DatasetIdentifier, self).__init__(**kwargs) self.saved_id = saved_id self.registered_id = registered_id self.registered_version = registered_version class DatasetInputDetails(msrest.serialization.Model): """DatasetInputDetails. :ivar input_name: :vartype input_name: str :ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs". :vartype mechanism: str or ~flow.models.DatasetDeliveryMechanism :ivar path_on_compute: :vartype path_on_compute: str """ _attribute_map = { 'input_name': {'key': 'inputName', 'type': 'str'}, 'mechanism': {'key': 'mechanism', 'type': 'str'}, 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, } def __init__( self, *, input_name: Optional[str] = None, mechanism: Optional[Union[str, "DatasetDeliveryMechanism"]] = None, path_on_compute: Optional[str] = None, **kwargs ): """ :keyword input_name: :paramtype input_name: str :keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs". :paramtype mechanism: str or ~flow.models.DatasetDeliveryMechanism :keyword path_on_compute: :paramtype path_on_compute: str """ super(DatasetInputDetails, self).__init__(**kwargs) self.input_name = input_name self.mechanism = mechanism self.path_on_compute = path_on_compute class DatasetLineage(msrest.serialization.Model): """DatasetLineage. 
    :ivar identifier:
    :vartype identifier: ~flow.models.DatasetIdentifier
    :ivar consumption_type: Possible values include: "RunInput", "Reference".
    :vartype consumption_type: str or ~flow.models.DatasetConsumptionType
    :ivar input_details:
    :vartype input_details: ~flow.models.DatasetInputDetails
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'identifier': {'key': 'identifier', 'type': 'DatasetIdentifier'},
        'consumption_type': {'key': 'consumptionType', 'type': 'str'},
        'input_details': {'key': 'inputDetails', 'type': 'DatasetInputDetails'},
    }

    def __init__(
        self,
        *,
        identifier: Optional["DatasetIdentifier"] = None,
        consumption_type: Optional[Union[str, "DatasetConsumptionType"]] = None,
        input_details: Optional["DatasetInputDetails"] = None,
        **kwargs
    ):
        """
        :keyword identifier:
        :paramtype identifier: ~flow.models.DatasetIdentifier
        :keyword consumption_type: Possible values include: "RunInput", "Reference".
        :paramtype consumption_type: str or ~flow.models.DatasetConsumptionType
        :keyword input_details:
        :paramtype input_details: ~flow.models.DatasetInputDetails
        """
        super(DatasetLineage, self).__init__(**kwargs)
        self.identifier = identifier
        self.consumption_type = consumption_type
        self.input_details = input_details


# Auto-generated msrest model: describes a dataset produced as a run output,
# including registration and output options.
class DatasetOutput(msrest.serialization.Model):
    """DatasetOutput.

    :ivar dataset_type: Possible values include: "File", "Tabular".
    :vartype dataset_type: str or ~flow.models.DatasetType
    :ivar dataset_registration:
    :vartype dataset_registration: ~flow.models.DatasetRegistration
    :ivar dataset_output_options:
    :vartype dataset_output_options: ~flow.models.DatasetOutputOptions
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'dataset_type': {'key': 'datasetType', 'type': 'str'},
        'dataset_registration': {'key': 'datasetRegistration', 'type': 'DatasetRegistration'},
        'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'DatasetOutputOptions'},
    }

    def __init__(
        self,
        *,
        dataset_type: Optional[Union[str, "DatasetType"]] = None,
        dataset_registration: Optional["DatasetRegistration"] = None,
        dataset_output_options: Optional["DatasetOutputOptions"] = None,
        **kwargs
    ):
        """
        :keyword dataset_type: Possible values include: "File", "Tabular".
        :paramtype dataset_type: str or ~flow.models.DatasetType
        :keyword dataset_registration:
        :paramtype dataset_registration: ~flow.models.DatasetRegistration
        :keyword dataset_output_options:
        :paramtype dataset_output_options: ~flow.models.DatasetOutputOptions
        """
        super(DatasetOutput, self).__init__(**kwargs)
        self.dataset_type = dataset_type
        self.dataset_registration = dataset_registration
        self.dataset_output_options = dataset_output_options


# Auto-generated msrest model: names the output slot a dataset came from.
class DatasetOutputDetails(msrest.serialization.Model):
    """DatasetOutputDetails.

    :ivar output_name:
    :vartype output_name: str
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'output_name': {'key': 'outputName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        output_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword output_name:
        :paramtype output_name: str
        """
        super(DatasetOutputDetails, self).__init__(**kwargs)
        self.output_name = output_name


# Auto-generated msrest model: where/how a dataset output is written on the datastore.
class DatasetOutputOptions(msrest.serialization.Model):
    """DatasetOutputOptions.
    :ivar source_globs:
    :vartype source_globs: ~flow.models.GlobsOptions
    :ivar path_on_datastore:
    :vartype path_on_datastore: str
    :ivar path_on_datastore_parameter_assignment:
    :vartype path_on_datastore_parameter_assignment: ~flow.models.ParameterAssignment
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    # NOTE(review): the 'PathOnDatastoreParameterAssignment' wire key is PascalCase while its
    # siblings are camelCase — presumably mirrors the service contract; confirm before changing.
    _attribute_map = {
        'source_globs': {'key': 'sourceGlobs', 'type': 'GlobsOptions'},
        'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'},
        'path_on_datastore_parameter_assignment': {'key': 'PathOnDatastoreParameterAssignment', 'type': 'ParameterAssignment'},
    }

    def __init__(
        self,
        *,
        source_globs: Optional["GlobsOptions"] = None,
        path_on_datastore: Optional[str] = None,
        path_on_datastore_parameter_assignment: Optional["ParameterAssignment"] = None,
        **kwargs
    ):
        """
        :keyword source_globs:
        :paramtype source_globs: ~flow.models.GlobsOptions
        :keyword path_on_datastore:
        :paramtype path_on_datastore: str
        :keyword path_on_datastore_parameter_assignment:
        :paramtype path_on_datastore_parameter_assignment: ~flow.models.ParameterAssignment
        """
        super(DatasetOutputOptions, self).__init__(**kwargs)
        self.source_globs = source_globs
        self.path_on_datastore = path_on_datastore
        self.path_on_datastore_parameter_assignment = path_on_datastore_parameter_assignment


# Auto-generated msrest model: a dataset-path pipeline parameter with an optional default.
class DataSetPathParameter(msrest.serialization.Model):
    """DataSetPathParameter.

    :ivar name:
    :vartype name: str
    :ivar documentation:
    :vartype documentation: str
    :ivar default_value:
    :vartype default_value: ~flow.models.DataSetDefinitionValue
    :ivar is_optional:
    :vartype is_optional: bool
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'documentation': {'key': 'documentation', 'type': 'str'},
        'default_value': {'key': 'defaultValue', 'type': 'DataSetDefinitionValue'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        documentation: Optional[str] = None,
        default_value: Optional["DataSetDefinitionValue"] = None,
        is_optional: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword documentation:
        :paramtype documentation: str
        :keyword default_value:
        :paramtype default_value: ~flow.models.DataSetDefinitionValue
        :keyword is_optional:
        :paramtype is_optional: bool
        """
        super(DataSetPathParameter, self).__init__(**kwargs)
        self.name = name
        self.documentation = documentation
        self.default_value = default_value
        self.is_optional = is_optional


# Auto-generated msrest model: settings for registering a dataset (name, versioning, tags).
class DatasetRegistration(msrest.serialization.Model):
    """DatasetRegistration.

    :ivar name:
    :vartype name: str
    :ivar create_new_version:
    :vartype create_new_version: bool
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar additional_transformations:
    :vartype additional_transformations: str
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'create_new_version': {'key': 'createNewVersion', 'type': 'bool'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        create_new_version: Optional[bool] = None,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        additional_transformations: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword create_new_version:
        :paramtype create_new_version: bool
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword additional_transformations:
        :paramtype additional_transformations: str
        """
        super(DatasetRegistration, self).__init__(**kwargs)
        self.name = name
        self.create_new_version = create_new_version
        self.description = description
        self.tags = tags
        self.additional_transformations = additional_transformations


# Auto-generated msrest model: extra transformation applied at dataset registration time.
# NOTE(review): field is singular here ('additionalTransformation') vs plural on
# DatasetRegistration — matches the generated wire contract.
class DatasetRegistrationOptions(msrest.serialization.Model):
    """DatasetRegistrationOptions.

    :ivar additional_transformation:
    :vartype additional_transformation: str
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'additional_transformation': {'key': 'additionalTransformation', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        additional_transformation: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword additional_transformation:
        :paramtype additional_transformation: str
        """
        super(DatasetRegistrationOptions, self).__init__(**kwargs)
        self.additional_transformation = additional_transformation


# Auto-generated msrest model: AutoML-style data settings (target/weight columns,
# validation and test splits).
class DataSettings(msrest.serialization.Model):
    """DataSettings.
    :ivar target_column_name:
    :vartype target_column_name: str
    :ivar weight_column_name:
    :vartype weight_column_name: str
    :ivar positive_label:
    :vartype positive_label: str
    :ivar validation_data:
    :vartype validation_data: ~flow.models.ValidationDataSettings
    :ivar test_data:
    :vartype test_data: ~flow.models.TestDataSettings
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'target_column_name': {'key': 'targetColumnName', 'type': 'str'},
        'weight_column_name': {'key': 'weightColumnName', 'type': 'str'},
        'positive_label': {'key': 'positiveLabel', 'type': 'str'},
        'validation_data': {'key': 'validationData', 'type': 'ValidationDataSettings'},
        'test_data': {'key': 'testData', 'type': 'TestDataSettings'},
    }

    def __init__(
        self,
        *,
        target_column_name: Optional[str] = None,
        weight_column_name: Optional[str] = None,
        positive_label: Optional[str] = None,
        validation_data: Optional["ValidationDataSettings"] = None,
        test_data: Optional["TestDataSettings"] = None,
        **kwargs
    ):
        """
        :keyword target_column_name:
        :paramtype target_column_name: str
        :keyword weight_column_name:
        :paramtype weight_column_name: str
        :keyword positive_label:
        :paramtype positive_label: str
        :keyword validation_data:
        :paramtype validation_data: ~flow.models.ValidationDataSettings
        :keyword test_data:
        :paramtype test_data: ~flow.models.TestDataSettings
        """
        super(DataSettings, self).__init__(**kwargs)
        self.target_column_name = target_column_name
        self.weight_column_name = weight_column_name
        self.positive_label = positive_label
        self.validation_data = validation_data
        self.test_data = test_data


# Auto-generated msrest model: names the datastore backing an operation.
class DatastoreSetting(msrest.serialization.Model):
    """DatastoreSetting.

    :ivar data_store_name:
    :vartype data_store_name: str
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        data_store_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword data_store_name:
        :paramtype data_store_name: str
        """
        super(DatastoreSetting, self).__init__(**kwargs)
        self.data_store_name = data_store_name


# Auto-generated msrest model: cloud-side flag for data-transfer steps.
# NOTE(review): wire key 'AllowOverwrite' is PascalCase — matches the generated contract.
class DataTransferCloudConfiguration(msrest.serialization.Model):
    """DataTransferCloudConfiguration.

    :ivar allow_overwrite:
    :vartype allow_overwrite: bool
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'allow_overwrite': {'key': 'AllowOverwrite', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        allow_overwrite: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword allow_overwrite:
        :paramtype allow_overwrite: bool
        """
        super(DataTransferCloudConfiguration, self).__init__(**kwargs)
        self.allow_overwrite = allow_overwrite


# Auto-generated msrest model: destination of a data transfer — either a file system
# or a database sink, discriminated by `type`.
class DataTransferSink(msrest.serialization.Model):
    """DataTransferSink.

    :ivar type: Possible values include: "DataBase", "FileSystem".
    :vartype type: str or ~flow.models.DataTransferStorageType
    :ivar file_system:
    :vartype file_system: ~flow.models.FileSystem
    :ivar database_sink:
    :vartype database_sink: ~flow.models.DatabaseSink
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'file_system': {'key': 'fileSystem', 'type': 'FileSystem'},
        'database_sink': {'key': 'databaseSink', 'type': 'DatabaseSink'},
    }

    def __init__(
        self,
        *,
        type: Optional[Union[str, "DataTransferStorageType"]] = None,
        file_system: Optional["FileSystem"] = None,
        database_sink: Optional["DatabaseSink"] = None,
        **kwargs
    ):
        """
        :keyword type: Possible values include: "DataBase", "FileSystem".
        :paramtype type: str or ~flow.models.DataTransferStorageType
        :keyword file_system:
        :paramtype file_system: ~flow.models.FileSystem
        :keyword database_sink:
        :paramtype database_sink: ~flow.models.DatabaseSink
        """
        super(DataTransferSink, self).__init__(**kwargs)
        self.type = type
        self.file_system = file_system
        self.database_sink = database_sink


# Auto-generated msrest model: origin of a data transfer — either a file system
# or a database source, discriminated by `type`. Mirrors DataTransferSink.
class DataTransferSource(msrest.serialization.Model):
    """DataTransferSource.

    :ivar type: Possible values include: "DataBase", "FileSystem".
    :vartype type: str or ~flow.models.DataTransferStorageType
    :ivar file_system:
    :vartype file_system: ~flow.models.FileSystem
    :ivar database_source:
    :vartype database_source: ~flow.models.DatabaseSource
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'file_system': {'key': 'fileSystem', 'type': 'FileSystem'},
        'database_source': {'key': 'databaseSource', 'type': 'DatabaseSource'},
    }

    def __init__(
        self,
        *,
        type: Optional[Union[str, "DataTransferStorageType"]] = None,
        file_system: Optional["FileSystem"] = None,
        database_source: Optional["DatabaseSource"] = None,
        **kwargs
    ):
        """
        :keyword type: Possible values include: "DataBase", "FileSystem".
        :paramtype type: str or ~flow.models.DataTransferStorageType
        :keyword file_system:
        :paramtype file_system: ~flow.models.FileSystem
        :keyword database_source:
        :paramtype database_source: ~flow.models.DatabaseSource
        """
        super(DataTransferSource, self).__init__(**kwargs)
        self.type = type
        self.file_system = file_system
        self.database_source = database_source


# Auto-generated msrest model: cloud settings for a Data Transfer v2 task
# (import / export / copy), with per-name source and sink maps.
class DataTransferV2CloudSetting(msrest.serialization.Model):
    """DataTransferV2CloudSetting.

    :ivar task_type: Possible values include: "ImportData", "ExportData", "CopyData".
    :vartype task_type: str or ~flow.models.DataTransferTaskType
    :ivar compute_name:
    :vartype compute_name: str
    :ivar copy_data_task:
    :vartype copy_data_task: ~flow.models.CopyDataTask
    :ivar import_data_task:
    :vartype import_data_task: ~flow.models.ImportDataTask
    :ivar export_data_task:
    :vartype export_data_task: ~flow.models.ExportDataTask
    :ivar data_transfer_sources: This is a dictionary.
    :vartype data_transfer_sources: dict[str, ~flow.models.DataTransferSource]
    :ivar data_transfer_sinks: This is a dictionary.
    :vartype data_transfer_sinks: dict[str, ~flow.models.DataTransferSink]
    :ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
    :vartype data_copy_mode: str or ~flow.models.DataCopyMode
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    # NOTE(review): all keys except 'taskType' are PascalCase — matches the generated contract.
    _attribute_map = {
        'task_type': {'key': 'taskType', 'type': 'str'},
        'compute_name': {'key': 'ComputeName', 'type': 'str'},
        'copy_data_task': {'key': 'CopyDataTask', 'type': 'CopyDataTask'},
        'import_data_task': {'key': 'ImportDataTask', 'type': 'ImportDataTask'},
        'export_data_task': {'key': 'ExportDataTask', 'type': 'ExportDataTask'},
        'data_transfer_sources': {'key': 'DataTransferSources', 'type': '{DataTransferSource}'},
        'data_transfer_sinks': {'key': 'DataTransferSinks', 'type': '{DataTransferSink}'},
        'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        task_type: Optional[Union[str, "DataTransferTaskType"]] = None,
        compute_name: Optional[str] = None,
        copy_data_task: Optional["CopyDataTask"] = None,
        import_data_task: Optional["ImportDataTask"] = None,
        export_data_task: Optional["ExportDataTask"] = None,
        data_transfer_sources: Optional[Dict[str, "DataTransferSource"]] = None,
        data_transfer_sinks: Optional[Dict[str, "DataTransferSink"]] = None,
        data_copy_mode: Optional[Union[str, "DataCopyMode"]] = None,
        **kwargs
    ):
        """
        :keyword task_type: Possible values include: "ImportData", "ExportData", "CopyData".
        :paramtype task_type: str or ~flow.models.DataTransferTaskType
        :keyword compute_name:
        :paramtype compute_name: str
        :keyword copy_data_task:
        :paramtype copy_data_task: ~flow.models.CopyDataTask
        :keyword import_data_task:
        :paramtype import_data_task: ~flow.models.ImportDataTask
        :keyword export_data_task:
        :paramtype export_data_task: ~flow.models.ExportDataTask
        :keyword data_transfer_sources: This is a dictionary.
        :paramtype data_transfer_sources: dict[str, ~flow.models.DataTransferSource]
        :keyword data_transfer_sinks: This is a dictionary.
        :paramtype data_transfer_sinks: dict[str, ~flow.models.DataTransferSink]
        :keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
        :paramtype data_copy_mode: str or ~flow.models.DataCopyMode
        """
        super(DataTransferV2CloudSetting, self).__init__(**kwargs)
        self.task_type = task_type
        self.compute_name = compute_name
        self.copy_data_task = copy_data_task
        self.import_data_task = import_data_task
        self.export_data_task = export_data_task
        self.data_transfer_sources = data_transfer_sources
        self.data_transfer_sinks = data_transfer_sinks
        self.data_copy_mode = data_copy_mode


# Auto-generated msrest model: metadata for creating a custom data type.
class DataTypeCreationInfo(msrest.serialization.Model):
    """DataTypeCreationInfo.
    :ivar id:
    :vartype id: str
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar is_directory:
    :vartype is_directory: bool
    :ivar file_extension:
    :vartype file_extension: str
    :ivar parent_data_type_ids:
    :vartype parent_data_type_ids: list[str]
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'is_directory': {'key': 'isDirectory', 'type': 'bool'},
        'file_extension': {'key': 'fileExtension', 'type': 'str'},
        'parent_data_type_ids': {'key': 'parentDataTypeIds', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        is_directory: Optional[bool] = None,
        file_extension: Optional[str] = None,
        parent_data_type_ids: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword is_directory:
        :paramtype is_directory: bool
        :keyword file_extension:
        :paramtype file_extension: str
        :keyword parent_data_type_ids:
        :paramtype parent_data_type_ids: list[str]
        """
        super(DataTypeCreationInfo, self).__init__(**kwargs)
        self.id = id
        self.name = name
        self.description = description
        self.is_directory = is_directory
        self.file_extension = file_extension
        self.parent_data_type_ids = parent_data_type_ids


# Auto-generated msrest model: a Databricks File System path relative to an AML datastore.
class DBFSReference(msrest.serialization.Model):
    """DBFSReference.

    :ivar relative_path:
    :vartype relative_path: str
    :ivar aml_data_store_name:
    :vartype aml_data_store_name: str
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        relative_path: Optional[str] = None,
        aml_data_store_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword aml_data_store_name:
        :paramtype aml_data_store_name: str
        """
        super(DBFSReference, self).__init__(**kwargs)
        self.relative_path = relative_path
        self.aml_data_store_name = aml_data_store_name


# Auto-generated msrest model: DBFS storage destination DTO.
class DbfsStorageInfoDto(msrest.serialization.Model):
    """DbfsStorageInfoDto.

    :ivar destination:
    :vartype destination: str
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'destination': {'key': 'destination', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        destination: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword destination:
        :paramtype destination: str
        """
        super(DbfsStorageInfoDto, self).__init__(**kwargs)
        self.destination = destination


# Auto-generated msrest model: recursive service-side debug info (inner_exception
# nests another DebugInfoResponse).
class DebugInfoResponse(msrest.serialization.Model):
    """Internal debugging information not intended for external clients.

    :ivar type: The type.
    :vartype type: str
    :ivar message: The message.
    :vartype message: str
    :ivar stack_trace: The stack trace.
    :vartype stack_trace: str
    :ivar inner_exception: Internal debugging information not intended for external clients.
    :vartype inner_exception: ~flow.models.DebugInfoResponse
    :ivar data: This is a dictionary.
    :vartype data: dict[str, any]
    :ivar error_response: The error response.
    :vartype error_response: ~flow.models.ErrorResponse
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'stack_trace': {'key': 'stackTrace', 'type': 'str'},
        'inner_exception': {'key': 'innerException', 'type': 'DebugInfoResponse'},
        'data': {'key': 'data', 'type': '{object}'},
        'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
    }

    def __init__(
        self,
        *,
        type: Optional[str] = None,
        message: Optional[str] = None,
        stack_trace: Optional[str] = None,
        inner_exception: Optional["DebugInfoResponse"] = None,
        data: Optional[Dict[str, Any]] = None,
        error_response: Optional["ErrorResponse"] = None,
        **kwargs
    ):
        """
        :keyword type: The type.
        :paramtype type: str
        :keyword message: The message.
        :paramtype message: str
        :keyword stack_trace: The stack trace.
        :paramtype stack_trace: str
        :keyword inner_exception: Internal debugging information not intended for external clients.
        :paramtype inner_exception: ~flow.models.DebugInfoResponse
        :keyword data: This is a dictionary.
        :paramtype data: dict[str, any]
        :keyword error_response: The error response.
        :paramtype error_response: ~flow.models.ErrorResponse
        """
        super(DebugInfoResponse, self).__init__(**kwargs)
        self.type = type
        self.message = message
        self.stack_trace = stack_trace
        self.inner_exception = inner_exception
        self.data = data
        self.error_response = error_response


# Auto-generated msrest model: full request payload for deploying a flow as an online
# endpoint — flow source, endpoint identity/auth, and deployment sizing options.
class DeployFlowRequest(msrest.serialization.Model):
    """DeployFlowRequest.

    :ivar source_resource_id:
    :vartype source_resource_id: str
    :ivar source_flow_run_id:
    :vartype source_flow_run_id: str
    :ivar source_flow_id:
    :vartype source_flow_id: str
    :ivar flow:
    :vartype flow: ~flow.models.Flow
    :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
    :vartype flow_type: str or ~flow.models.FlowType
    :ivar flow_submit_run_settings:
    :vartype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
    :ivar output_names_included_in_endpoint_response:
    :vartype output_names_included_in_endpoint_response: list[str]
    :ivar endpoint_name:
    :vartype endpoint_name: str
    :ivar endpoint_description:
    :vartype endpoint_description: str
    :ivar auth_mode: Possible values include: "AMLToken", "Key", "AADToken".
    :vartype auth_mode: str or ~flow.models.EndpointAuthMode
    :ivar identity:
    :vartype identity: ~flow.models.ManagedServiceIdentity
    :ivar endpoint_tags: This is a dictionary.
    :vartype endpoint_tags: dict[str, str]
    :ivar connection_overrides:
    :vartype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
    :ivar use_workspace_connection:
    :vartype use_workspace_connection: bool
    :ivar deployment_name:
    :vartype deployment_name: str
    :ivar environment:
    :vartype environment: str
    :ivar environment_variables: This is a dictionary.
    :vartype environment_variables: dict[str, str]
    :ivar deployment_tags: This is a dictionary.
    :vartype deployment_tags: dict[str, str]
    :ivar app_insights_enabled:
    :vartype app_insights_enabled: bool
    :ivar enable_model_data_collector:
    :vartype enable_model_data_collector: bool
    :ivar skip_update_traffic_to_full:
    :vartype skip_update_traffic_to_full: bool
    :ivar enable_streaming_response:
    :vartype enable_streaming_response: bool
    :ivar use_flow_snapshot_to_deploy:
    :vartype use_flow_snapshot_to_deploy: bool
    :ivar instance_type:
    :vartype instance_type: str
    :ivar instance_count:
    :vartype instance_count: int
    :ivar auto_grant_connection_permission:
    :vartype auto_grant_connection_permission: bool
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'source_resource_id': {'key': 'sourceResourceId', 'type': 'str'},
        'source_flow_run_id': {'key': 'sourceFlowRunId', 'type': 'str'},
        'source_flow_id': {'key': 'sourceFlowId', 'type': 'str'},
        'flow': {'key': 'flow', 'type': 'Flow'},
        'flow_type': {'key': 'flowType', 'type': 'str'},
        'flow_submit_run_settings': {'key': 'flowSubmitRunSettings', 'type': 'FlowSubmitRunSettings'},
        'output_names_included_in_endpoint_response': {'key': 'outputNamesIncludedInEndpointResponse', 'type': '[str]'},
        'endpoint_name': {'key': 'endpointName', 'type': 'str'},
        'endpoint_description': {'key': 'endpointDescription', 'type': 'str'},
        'auth_mode': {'key': 'authMode', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
        'endpoint_tags': {'key': 'endpointTags', 'type': '{str}'},
        'connection_overrides': {'key': 'connectionOverrides', 'type': '[ConnectionOverrideSetting]'},
        'use_workspace_connection': {'key': 'useWorkspaceConnection', 'type': 'bool'},
        'deployment_name': {'key': 'deploymentName', 'type': 'str'},
        'environment': {'key': 'environment', 'type': 'str'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'deployment_tags': {'key': 'deploymentTags', 'type': '{str}'},
        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
        'enable_model_data_collector': {'key': 'enableModelDataCollector', 'type': 'bool'},
        'skip_update_traffic_to_full': {'key': 'skipUpdateTrafficToFull', 'type': 'bool'},
        'enable_streaming_response': {'key': 'enableStreamingResponse', 'type': 'bool'},
        'use_flow_snapshot_to_deploy': {'key': 'useFlowSnapshotToDeploy', 'type': 'bool'},
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
        'auto_grant_connection_permission': {'key': 'autoGrantConnectionPermission', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        source_resource_id: Optional[str] = None,
        source_flow_run_id: Optional[str] = None,
        source_flow_id: Optional[str] = None,
        flow: Optional["Flow"] = None,
        flow_type: Optional[Union[str, "FlowType"]] = None,
        flow_submit_run_settings: Optional["FlowSubmitRunSettings"] = None,
        output_names_included_in_endpoint_response: Optional[List[str]] = None,
        endpoint_name: Optional[str] = None,
        endpoint_description: Optional[str] = None,
        auth_mode: Optional[Union[str, "EndpointAuthMode"]] = None,
        identity: Optional["ManagedServiceIdentity"] = None,
        endpoint_tags: Optional[Dict[str, str]] = None,
        connection_overrides: Optional[List["ConnectionOverrideSetting"]] = None,
        use_workspace_connection: Optional[bool] = None,
        deployment_name: Optional[str] = None,
        environment: Optional[str] = None,
        environment_variables: Optional[Dict[str, str]] = None,
        deployment_tags: Optional[Dict[str, str]] = None,
        app_insights_enabled: Optional[bool] = None,
        enable_model_data_collector: Optional[bool] = None,
        skip_update_traffic_to_full: Optional[bool] = None,
        enable_streaming_response: Optional[bool] = None,
        use_flow_snapshot_to_deploy: Optional[bool] = None,
        instance_type: Optional[str] = None,
        instance_count: Optional[int] = None,
        auto_grant_connection_permission: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword source_resource_id:
        :paramtype source_resource_id: str
        :keyword source_flow_run_id:
        :paramtype source_flow_run_id: str
        :keyword source_flow_id:
        :paramtype source_flow_id: str
        :keyword flow:
        :paramtype flow: ~flow.models.Flow
        :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
        :paramtype flow_type: str or ~flow.models.FlowType
        :keyword flow_submit_run_settings:
        :paramtype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
        :keyword output_names_included_in_endpoint_response:
        :paramtype output_names_included_in_endpoint_response: list[str]
        :keyword endpoint_name:
        :paramtype endpoint_name: str
        :keyword endpoint_description:
        :paramtype endpoint_description: str
        :keyword auth_mode: Possible values include: "AMLToken", "Key", "AADToken".
        :paramtype auth_mode: str or ~flow.models.EndpointAuthMode
        :keyword identity:
        :paramtype identity: ~flow.models.ManagedServiceIdentity
        :keyword endpoint_tags: This is a dictionary.
        :paramtype endpoint_tags: dict[str, str]
        :keyword connection_overrides:
        :paramtype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
        :keyword use_workspace_connection:
        :paramtype use_workspace_connection: bool
        :keyword deployment_name:
        :paramtype deployment_name: str
        :keyword environment:
        :paramtype environment: str
        :keyword environment_variables: This is a dictionary.
        :paramtype environment_variables: dict[str, str]
        :keyword deployment_tags: This is a dictionary.
        :paramtype deployment_tags: dict[str, str]
        :keyword app_insights_enabled:
        :paramtype app_insights_enabled: bool
        :keyword enable_model_data_collector:
        :paramtype enable_model_data_collector: bool
        :keyword skip_update_traffic_to_full:
        :paramtype skip_update_traffic_to_full: bool
        :keyword enable_streaming_response:
        :paramtype enable_streaming_response: bool
        :keyword use_flow_snapshot_to_deploy:
        :paramtype use_flow_snapshot_to_deploy: bool
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword instance_count:
        :paramtype instance_count: int
        :keyword auto_grant_connection_permission:
        :paramtype auto_grant_connection_permission: bool
        """
        super(DeployFlowRequest, self).__init__(**kwargs)
        self.source_resource_id = source_resource_id
        self.source_flow_run_id = source_flow_run_id
        self.source_flow_id = source_flow_id
        self.flow = flow
        self.flow_type = flow_type
        self.flow_submit_run_settings = flow_submit_run_settings
        self.output_names_included_in_endpoint_response = output_names_included_in_endpoint_response
        self.endpoint_name = endpoint_name
        self.endpoint_description = endpoint_description
        self.auth_mode = auth_mode
        self.identity = identity
        self.endpoint_tags = endpoint_tags
        self.connection_overrides = connection_overrides
        self.use_workspace_connection = use_workspace_connection
        self.deployment_name = deployment_name
        self.environment = environment
        self.environment_variables = environment_variables
        self.deployment_tags = deployment_tags
        self.app_insights_enabled = app_insights_enabled
        self.enable_model_data_collector = enable_model_data_collector
        self.skip_update_traffic_to_full = skip_update_traffic_to_full
        self.enable_streaming_response = enable_streaming_response
        self.use_flow_snapshot_to_deploy = use_flow_snapshot_to_deploy
        self.instance_type = instance_type
        self.instance_count = instance_count
        self.auto_grant_connection_permission = auto_grant_connection_permission


# Auto-generated msrest model: status of a single deployment operation/service.
class DeploymentInfo(msrest.serialization.Model):
    """DeploymentInfo.
    :ivar operation_id:
    :vartype operation_id: str
    :ivar service_id:
    :vartype service_id: str
    :ivar service_name:
    :vartype service_name: str
    :ivar status_detail:
    :vartype status_detail: str
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'operation_id': {'key': 'operationId', 'type': 'str'},
        'service_id': {'key': 'serviceId', 'type': 'str'},
        'service_name': {'key': 'serviceName', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        operation_id: Optional[str] = None,
        service_id: Optional[str] = None,
        service_name: Optional[str] = None,
        status_detail: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword operation_id:
        :paramtype operation_id: str
        :keyword service_id:
        :paramtype service_id: str
        :keyword service_name:
        :paramtype service_name: str
        :keyword status_detail:
        :paramtype status_detail: str
        """
        super(DeploymentInfo, self).__init__(**kwargs)
        self.operation_id = operation_id
        self.service_id = service_id
        self.service_name = service_name
        self.status_detail = status_detail


# Auto-generated msrest model: selects the distributed-training framework.
class DistributionConfiguration(msrest.serialization.Model):
    """DistributionConfiguration.

    :ivar distribution_type: Possible values include: "PyTorch", "TensorFlow", "Mpi", "Ray".
    :vartype distribution_type: str or ~flow.models.DistributionType
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'distribution_type': {'key': 'distributionType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        distribution_type: Optional[Union[str, "DistributionType"]] = None,
        **kwargs
    ):
        """
        :keyword distribution_type: Possible values include: "PyTorch", "TensorFlow", "Mpi", "Ray".
        :paramtype distribution_type: str or ~flow.models.DistributionType
        """
        super(DistributionConfiguration, self).__init__(**kwargs)
        self.distribution_type = distribution_type


# Auto-generated msrest model: describes one configurable distribution parameter
# (label/description plus a Text/Number input type).
class DistributionParameter(msrest.serialization.Model):
    """DistributionParameter.

    :ivar name:
    :vartype name: str
    :ivar label:
    :vartype label: str
    :ivar description:
    :vartype description: str
    :ivar input_type: Possible values include: "Text", "Number".
    :vartype input_type: str or ~flow.models.DistributionParameterEnum
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'input_type': {'key': 'inputType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        label: Optional[str] = None,
        description: Optional[str] = None,
        input_type: Optional[Union[str, "DistributionParameterEnum"]] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword label:
        :paramtype label: str
        :keyword description:
        :paramtype description: str
        :keyword input_type: Possible values include: "Text", "Number".
        :paramtype input_type: str or ~flow.models.DistributionParameterEnum
        """
        super(DistributionParameter, self).__init__(**kwargs)
        self.name = name
        self.label = label
        self.description = description
        self.input_type = input_type


# Auto-generated msrest model: where a Docker build context lives and which
# Dockerfile inside it to use.
class DockerBuildContext(msrest.serialization.Model):
    """DockerBuildContext.

    :ivar location_type: Possible values include: "Git", "StorageAccount".
    :vartype location_type: str or ~flow.models.BuildContextLocationType
    :ivar location:
    :vartype location: str
    :ivar dockerfile_path:
    :vartype dockerfile_path: str
    """

    # Python attribute -> REST wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'location_type': {'key': 'locationType', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'dockerfile_path': {'key': 'dockerfilePath', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        location_type: Optional[Union[str, "BuildContextLocationType"]] = None,
        location: Optional[str] = None,
        dockerfile_path: Optional[str] = "Dockerfile",  # only field here with a non-None default
        **kwargs
    ):
        """
        :keyword location_type: Possible values include: "Git", "StorageAccount".
:paramtype location_type: str or ~flow.models.BuildContextLocationType :keyword location: :paramtype location: str :keyword dockerfile_path: :paramtype dockerfile_path: str """ super(DockerBuildContext, self).__init__(**kwargs) self.location_type = location_type self.location = location self.dockerfile_path = dockerfile_path class DockerConfiguration(msrest.serialization.Model): """DockerConfiguration. :ivar use_docker: :vartype use_docker: bool :ivar shared_volumes: :vartype shared_volumes: bool :ivar arguments: :vartype arguments: list[str] """ _attribute_map = { 'use_docker': {'key': 'useDocker', 'type': 'bool'}, 'shared_volumes': {'key': 'sharedVolumes', 'type': 'bool'}, 'arguments': {'key': 'arguments', 'type': '[str]'}, } def __init__( self, *, use_docker: Optional[bool] = None, shared_volumes: Optional[bool] = None, arguments: Optional[List[str]] = None, **kwargs ): """ :keyword use_docker: :paramtype use_docker: bool :keyword shared_volumes: :paramtype shared_volumes: bool :keyword arguments: :paramtype arguments: list[str] """ super(DockerConfiguration, self).__init__(**kwargs) self.use_docker = use_docker self.shared_volumes = shared_volumes self.arguments = arguments class DockerImagePlatform(msrest.serialization.Model): """DockerImagePlatform. :ivar os: :vartype os: str :ivar architecture: :vartype architecture: str """ _attribute_map = { 'os': {'key': 'os', 'type': 'str'}, 'architecture': {'key': 'architecture', 'type': 'str'}, } def __init__( self, *, os: Optional[str] = None, architecture: Optional[str] = None, **kwargs ): """ :keyword os: :paramtype os: str :keyword architecture: :paramtype architecture: str """ super(DockerImagePlatform, self).__init__(**kwargs) self.os = os self.architecture = architecture class DockerSection(msrest.serialization.Model): """DockerSection. 
class DockerSettingConfiguration(msrest.serialization.Model):
    """Docker execution settings: use-Docker flag, volume sharing, shm size, extra args.

    :ivar use_docker:
    :vartype use_docker: bool
    :ivar shared_volumes:
    :vartype shared_volumes: bool
    :ivar shm_size:
    :vartype shm_size: str
    :ivar arguments:
    :vartype arguments: list[str]
    """

    # msrest wire mapping: Python attribute -> REST key / serialized type.
    _attribute_map = {
        'use_docker': {'key': 'useDocker', 'type': 'bool'},
        'shared_volumes': {'key': 'sharedVolumes', 'type': 'bool'},
        'shm_size': {'key': 'shmSize', 'type': 'str'},
        'arguments': {'key': 'arguments', 'type': '[str]'},
    }

    def __init__(self, *, use_docker: Optional[bool] = None, shared_volumes: Optional[bool] = None, shm_size: Optional[str] = None, arguments: Optional[List[str]] = None, **kwargs):
        """
        :keyword use_docker:
        :paramtype use_docker: bool
        :keyword shared_volumes:
        :paramtype shared_volumes: bool
        :keyword shm_size:
        :paramtype shm_size: str
        :keyword arguments:
        :paramtype arguments: list[str]
        """
        super().__init__(**kwargs)
        # Independent assignments; order is irrelevant.
        self.arguments = arguments
        self.shm_size = shm_size
        self.shared_volumes = shared_volumes
        self.use_docker = use_docker
class DoWhileControlFlowRunSettings(msrest.serialization.Model):
    """Run settings for a do-while control flow; carries the loop-iteration cap.

    :ivar max_loop_iteration_count:
    :vartype max_loop_iteration_count: ~flow.models.ParameterAssignment
    """

    # msrest wire mapping: Python attribute -> REST key / serialized type.
    _attribute_map = {
        'max_loop_iteration_count': {'key': 'maxLoopIterationCount', 'type': 'ParameterAssignment'},
    }

    def __init__(self, *, max_loop_iteration_count: Optional["ParameterAssignment"] = None, **kwargs):
        """
        :keyword max_loop_iteration_count:
        :paramtype max_loop_iteration_count: ~flow.models.ParameterAssignment
        """
        super().__init__(**kwargs)
        self.max_loop_iteration_count = max_loop_iteration_count
class EntityInterface(msrest.serialization.Model):
    """EntityInterface.

    Generated msrest model grouping an entity's parameter and port lists.

    :ivar parameters:
    :vartype parameters: list[~flow.models.Parameter]
    :ivar ports:
    :vartype ports: ~flow.models.NodePortInterface
    :ivar metadata_parameters:
    :vartype metadata_parameters: list[~flow.models.Parameter]
    :ivar data_path_parameters:
    :vartype data_path_parameters: list[~flow.models.DataPathParameter]
    :ivar data_path_parameter_list:
    :vartype data_path_parameter_list: list[~flow.models.DataSetPathParameter]
    :ivar asset_output_settings_parameter_list:
    :vartype asset_output_settings_parameter_list:
     list[~flow.models.AssetOutputSettingsParameter]
    """

    # msrest wire mapping: Python attribute -> REST key / serialized type.
    _attribute_map = {
        'parameters': {'key': 'parameters', 'type': '[Parameter]'},
        'ports': {'key': 'ports', 'type': 'NodePortInterface'},
        'metadata_parameters': {'key': 'metadataParameters', 'type': '[Parameter]'},
        'data_path_parameters': {'key': 'dataPathParameters', 'type': '[DataPathParameter]'},
        'data_path_parameter_list': {'key': 'dataPathParameterList', 'type': '[DataSetPathParameter]'},
        # NOTE(review): this wire key is PascalCase, unlike the camelCase keys above --
        # presumably it mirrors the service contract exactly; confirm before "fixing".
        'asset_output_settings_parameter_list': {'key': 'AssetOutputSettingsParameterList', 'type': '[AssetOutputSettingsParameter]'},
    }

    def __init__(
        self,
        *,
        parameters: Optional[List["Parameter"]] = None,
        ports: Optional["NodePortInterface"] = None,
        metadata_parameters: Optional[List["Parameter"]] = None,
        data_path_parameters: Optional[List["DataPathParameter"]] = None,
        data_path_parameter_list: Optional[List["DataSetPathParameter"]] = None,
        asset_output_settings_parameter_list: Optional[List["AssetOutputSettingsParameter"]] = None,
        **kwargs
    ):
        """
        :keyword parameters:
        :paramtype parameters: list[~flow.models.Parameter]
        :keyword ports:
        :paramtype ports: ~flow.models.NodePortInterface
        :keyword metadata_parameters:
        :paramtype metadata_parameters: list[~flow.models.Parameter]
        :keyword data_path_parameters:
        :paramtype data_path_parameters: list[~flow.models.DataPathParameter]
        :keyword data_path_parameter_list:
        :paramtype data_path_parameter_list: list[~flow.models.DataSetPathParameter]
        :keyword asset_output_settings_parameter_list:
        :paramtype asset_output_settings_parameter_list:
         list[~flow.models.AssetOutputSettingsParameter]
        """
        super(EntityInterface, self).__init__(**kwargs)
        self.parameters = parameters
        self.ports = ports
        self.metadata_parameters = metadata_parameters
        self.data_path_parameters = data_path_parameters
        self.data_path_parameter_list = data_path_parameter_list
        self.asset_output_settings_parameter_list = asset_output_settings_parameter_list
:ivar name: :vartype name: str :ivar version: :vartype version: str :ivar use_environment_definition: :vartype use_environment_definition: bool :ivar environment_definition_string: :vartype environment_definition_string: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, 'use_environment_definition': {'key': 'useEnvironmentDefinition', 'type': 'bool'}, 'environment_definition_string': {'key': 'environmentDefinitionString', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, version: Optional[str] = None, use_environment_definition: Optional[bool] = None, environment_definition_string: Optional[str] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword version: :paramtype version: str :keyword use_environment_definition: :paramtype use_environment_definition: bool :keyword environment_definition_string: :paramtype environment_definition_string: str """ super(EnvironmentConfiguration, self).__init__(**kwargs) self.name = name self.version = version self.use_environment_definition = use_environment_definition self.environment_definition_string = environment_definition_string class EnvironmentDefinition(msrest.serialization.Model): """EnvironmentDefinition. :ivar name: :vartype name: str :ivar version: :vartype version: str :ivar asset_id: :vartype asset_id: str :ivar auto_rebuild: :vartype auto_rebuild: bool :ivar python: :vartype python: ~flow.models.PythonSection :ivar environment_variables: Dictionary of :code:`<string>`. 
:vartype environment_variables: dict[str, str] :ivar docker: :vartype docker: ~flow.models.DockerSection :ivar spark: :vartype spark: ~flow.models.SparkSection :ivar r: :vartype r: ~flow.models.RSection :ivar inferencing_stack_version: :vartype inferencing_stack_version: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, 'asset_id': {'key': 'assetId', 'type': 'str'}, 'auto_rebuild': {'key': 'autoRebuild', 'type': 'bool'}, 'python': {'key': 'python', 'type': 'PythonSection'}, 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, 'docker': {'key': 'docker', 'type': 'DockerSection'}, 'spark': {'key': 'spark', 'type': 'SparkSection'}, 'r': {'key': 'r', 'type': 'RSection'}, 'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, version: Optional[str] = None, asset_id: Optional[str] = None, auto_rebuild: Optional[bool] = None, python: Optional["PythonSection"] = None, environment_variables: Optional[Dict[str, str]] = None, docker: Optional["DockerSection"] = None, spark: Optional["SparkSection"] = None, r: Optional["RSection"] = None, inferencing_stack_version: Optional[str] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword version: :paramtype version: str :keyword asset_id: :paramtype asset_id: str :keyword auto_rebuild: :paramtype auto_rebuild: bool :keyword python: :paramtype python: ~flow.models.PythonSection :keyword environment_variables: Dictionary of :code:`<string>`. 
class EnvironmentDefinitionDto(msrest.serialization.Model):
    """Lightweight environment reference: name, version and IP publisher fields.

    :ivar environment_name:
    :vartype environment_name: str
    :ivar environment_version:
    :vartype environment_version: str
    :ivar intellectual_property_publisher:
    :vartype intellectual_property_publisher: str
    """

    # msrest wire mapping: Python attribute -> REST key / serialized type.
    _attribute_map = {
        'environment_name': {'key': 'environmentName', 'type': 'str'},
        'environment_version': {'key': 'environmentVersion', 'type': 'str'},
        'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
    }

    def __init__(self, *, environment_name: Optional[str] = None, environment_version: Optional[str] = None, intellectual_property_publisher: Optional[str] = None, **kwargs):
        """
        :keyword environment_name:
        :paramtype environment_name: str
        :keyword environment_version:
        :paramtype environment_version: str
        :keyword intellectual_property_publisher:
        :paramtype intellectual_property_publisher: str
        """
        super().__init__(**kwargs)
        # Independent assignments; order is irrelevant.
        self.intellectual_property_publisher = intellectual_property_publisher
        self.environment_version = environment_version
        self.environment_name = environment_name
class ErrorAdditionalInfo(msrest.serialization.Model):
    """The resource management error additional info.

    :ivar type: The additional info type.
    :vartype type: str
    :ivar info: The additional info.
    :vartype info: any
    """

    # msrest wire mapping: Python attribute -> REST key / serialized type.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'info': {'key': 'info', 'type': 'object'},
    }

    def __init__(self, *, type: Optional[str] = None, info: Optional[Any] = None, **kwargs):
        """
        :keyword type: The additional info type.
        :paramtype type: str
        :keyword info: The additional info.
        :paramtype info: any
        """
        super().__init__(**kwargs)
        # 'type' deliberately shadows the builtin: the wire contract fixes the parameter name.
        self.info = info
        self.type = type
class EsCloudConfiguration(msrest.serialization.Model):
    """EsCloudConfiguration.

    Generated msrest model bundling cloud execution settings (environment, compute,
    Docker, identity, endpoints) for a run.

    :ivar enable_output_to_file_based_on_data_type_id:
    :vartype enable_output_to_file_based_on_data_type_id: bool
    :ivar environment:
    :vartype environment: ~flow.models.EnvironmentConfiguration
    :ivar hyper_drive_configuration:
    :vartype hyper_drive_configuration: ~flow.models.HyperDriveConfiguration
    :ivar k8_s_config:
    :vartype k8_s_config: ~flow.models.K8SConfiguration
    :ivar resource_config:
    :vartype resource_config: ~flow.models.AEVAResourceConfiguration
    :ivar torch_distributed_config:
    :vartype torch_distributed_config: ~flow.models.TorchDistributedConfiguration
    :ivar target_selector_config:
    :vartype target_selector_config: ~flow.models.TargetSelectorConfiguration
    :ivar docker_config:
    :vartype docker_config: ~flow.models.DockerSettingConfiguration
    :ivar environment_variables: Dictionary of :code:`<string>`.
    :vartype environment_variables: dict[str, str]
    :ivar max_run_duration_seconds:
    :vartype max_run_duration_seconds: int
    :ivar identity:
    :vartype identity: ~flow.models.IdentitySetting
    :ivar application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
    :vartype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
    :ivar run_config:
    :vartype run_config: str
    """

    # msrest wire mapping; note 'k8_s_config' serializes to the wire key 'k8sConfig'.
    _attribute_map = {
        'enable_output_to_file_based_on_data_type_id': {'key': 'enableOutputToFileBasedOnDataTypeId', 'type': 'bool'},
        'environment': {'key': 'environment', 'type': 'EnvironmentConfiguration'},
        'hyper_drive_configuration': {'key': 'hyperDriveConfiguration', 'type': 'HyperDriveConfiguration'},
        'k8_s_config': {'key': 'k8sConfig', 'type': 'K8SConfiguration'},
        'resource_config': {'key': 'resourceConfig', 'type': 'AEVAResourceConfiguration'},
        'torch_distributed_config': {'key': 'torchDistributedConfig', 'type': 'TorchDistributedConfiguration'},
        'target_selector_config': {'key': 'targetSelectorConfig', 'type': 'TargetSelectorConfiguration'},
        'docker_config': {'key': 'dockerConfig', 'type': 'DockerSettingConfiguration'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'int'},
        'identity': {'key': 'identity', 'type': 'IdentitySetting'},
        'application_endpoints': {'key': 'applicationEndpoints', 'type': '{ApplicationEndpointConfiguration}'},
        'run_config': {'key': 'runConfig', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        enable_output_to_file_based_on_data_type_id: Optional[bool] = None,
        environment: Optional["EnvironmentConfiguration"] = None,
        hyper_drive_configuration: Optional["HyperDriveConfiguration"] = None,
        k8_s_config: Optional["K8SConfiguration"] = None,
        resource_config: Optional["AEVAResourceConfiguration"] = None,
        torch_distributed_config: Optional["TorchDistributedConfiguration"] = None,
        target_selector_config: Optional["TargetSelectorConfiguration"] = None,
        docker_config: Optional["DockerSettingConfiguration"] = None,
        environment_variables: Optional[Dict[str, str]] = None,
        max_run_duration_seconds: Optional[int] = None,
        identity: Optional["IdentitySetting"] = None,
        application_endpoints: Optional[Dict[str, "ApplicationEndpointConfiguration"]] = None,
        run_config: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword enable_output_to_file_based_on_data_type_id:
        :paramtype enable_output_to_file_based_on_data_type_id: bool
        :keyword environment:
        :paramtype environment: ~flow.models.EnvironmentConfiguration
        :keyword hyper_drive_configuration:
        :paramtype hyper_drive_configuration: ~flow.models.HyperDriveConfiguration
        :keyword k8_s_config:
        :paramtype k8_s_config: ~flow.models.K8SConfiguration
        :keyword resource_config:
        :paramtype resource_config: ~flow.models.AEVAResourceConfiguration
        :keyword torch_distributed_config:
        :paramtype torch_distributed_config: ~flow.models.TorchDistributedConfiguration
        :keyword target_selector_config:
        :paramtype target_selector_config: ~flow.models.TargetSelectorConfiguration
        :keyword docker_config:
        :paramtype docker_config: ~flow.models.DockerSettingConfiguration
        :keyword environment_variables: Dictionary of :code:`<string>`.
        :paramtype environment_variables: dict[str, str]
        :keyword max_run_duration_seconds:
        :paramtype max_run_duration_seconds: int
        :keyword identity:
        :paramtype identity: ~flow.models.IdentitySetting
        :keyword application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
        :paramtype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
        :keyword run_config:
        :paramtype run_config: str
        """
        super(EsCloudConfiguration, self).__init__(**kwargs)
        self.enable_output_to_file_based_on_data_type_id = enable_output_to_file_based_on_data_type_id
        self.environment = environment
        self.hyper_drive_configuration = hyper_drive_configuration
        self.k8_s_config = k8_s_config
        self.resource_config = resource_config
        self.torch_distributed_config = torch_distributed_config
        self.target_selector_config = target_selector_config
        self.docker_config = docker_config
        self.environment_variables = environment_variables
        self.max_run_duration_seconds = max_run_duration_seconds
        self.identity = identity
        self.application_endpoints = application_endpoints
        self.run_config = run_config
:vartype properties: dict[str, str] """ _attribute_map = { 'flow_run_id': {'key': 'flowRunId', 'type': 'str'}, 'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'}, 'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'}, 'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'}, 'data_inputs': {'key': 'dataInputs', 'type': '{str}'}, 'connection_overrides': {'key': 'connectionOverrides', 'type': '[ConnectionOverrideSetting]'}, 'runtime_name': {'key': 'runtimeName', 'type': 'str'}, 'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, } def __init__( self, *, flow_run_id: Optional[str] = None, flow_run_display_name: Optional[str] = None, batch_data_input: Optional["BatchDataInput"] = None, inputs_mapping: Optional[Dict[str, str]] = None, data_inputs: Optional[Dict[str, str]] = None, connection_overrides: Optional[List["ConnectionOverrideSetting"]] = None, runtime_name: Optional[str] = None, aml_compute_name: Optional[str] = None, properties: Optional[Dict[str, str]] = None, **kwargs ): """ :keyword flow_run_id: :paramtype flow_run_id: str :keyword flow_run_display_name: :paramtype flow_run_display_name: str :keyword batch_data_input: :paramtype batch_data_input: ~flow.models.BatchDataInput :keyword inputs_mapping: This is a dictionary. :paramtype inputs_mapping: dict[str, str] :keyword data_inputs: This is a dictionary. :paramtype data_inputs: dict[str, str] :keyword connection_overrides: :paramtype connection_overrides: list[~flow.models.ConnectionOverrideSetting] :keyword runtime_name: :paramtype runtime_name: str :keyword aml_compute_name: :paramtype aml_compute_name: str :keyword properties: This is a dictionary. 
class ExampleRequest(msrest.serialization.Model):
    """Request payload carrying row-oriented 'inputs' plus free-form global parameters.

    :ivar inputs: This is a dictionary.
    :vartype inputs: dict[str, list[list[any]]]
    :ivar global_parameters: This is a dictionary.
    :vartype global_parameters: dict[str, any]
    """

    # msrest wire mapping: Python attribute -> REST key / serialized type.
    _attribute_map = {
        'inputs': {'key': 'inputs', 'type': '{[[object]]}'},
        'global_parameters': {'key': 'globalParameters', 'type': '{object}'},
    }

    def __init__(self, *, inputs: Optional[Dict[str, List[List[Any]]]] = None, global_parameters: Optional[Dict[str, Any]] = None, **kwargs):
        """
        :keyword inputs: This is a dictionary.
        :paramtype inputs: dict[str, list[list[any]]]
        :keyword global_parameters: This is a dictionary.
        :paramtype global_parameters: dict[str, any]
        """
        super().__init__(**kwargs)
        # Independent assignments; order is irrelevant.
        self.global_parameters = global_parameters
        self.inputs = inputs
class ExecutionDataLocation(msrest.serialization.Model):
    """Points at execution data via one of: dataset reference, data path, or URI.

    :ivar dataset:
    :vartype dataset: ~flow.models.RunDatasetReference
    :ivar data_path:
    :vartype data_path: ~flow.models.ExecutionDataPath
    :ivar uri:
    :vartype uri: ~flow.models.UriReference
    :ivar type:
    :vartype type: str
    """

    # msrest wire mapping: Python attribute -> REST key / serialized type.
    _attribute_map = {
        'dataset': {'key': 'dataset', 'type': 'RunDatasetReference'},
        'data_path': {'key': 'dataPath', 'type': 'ExecutionDataPath'},
        'uri': {'key': 'uri', 'type': 'UriReference'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, *, dataset: Optional["RunDatasetReference"] = None, data_path: Optional["ExecutionDataPath"] = None, uri: Optional["UriReference"] = None, type: Optional[str] = None, **kwargs):
        """
        :keyword dataset:
        :paramtype dataset: ~flow.models.RunDatasetReference
        :keyword data_path:
        :paramtype data_path: ~flow.models.ExecutionDataPath
        :keyword uri:
        :paramtype uri: ~flow.models.UriReference
        :keyword type:
        :paramtype type: str
        """
        super().__init__(**kwargs)
        # 'type' deliberately shadows the builtin: the wire contract fixes the parameter name.
        self.type = type
        self.uri = uri
        self.data_path = data_path
        self.dataset = dataset
:ivar datastore_name: :vartype datastore_name: str :ivar relative_path: :vartype relative_path: str """ _attribute_map = { 'datastore_name': {'key': 'datastoreName', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, } def __init__( self, *, datastore_name: Optional[str] = None, relative_path: Optional[str] = None, **kwargs ): """ :keyword datastore_name: :paramtype datastore_name: str :keyword relative_path: :paramtype relative_path: str """ super(ExecutionDataPath, self).__init__(**kwargs) self.datastore_name = datastore_name self.relative_path = relative_path class ExecutionGlobsOptions(msrest.serialization.Model): """ExecutionGlobsOptions. :ivar glob_patterns: :vartype glob_patterns: list[str] """ _attribute_map = { 'glob_patterns': {'key': 'globPatterns', 'type': '[str]'}, } def __init__( self, *, glob_patterns: Optional[List[str]] = None, **kwargs ): """ :keyword glob_patterns: :paramtype glob_patterns: list[str] """ super(ExecutionGlobsOptions, self).__init__(**kwargs) self.glob_patterns = glob_patterns class ExperimentComputeMetaInfo(msrest.serialization.Model): """ExperimentComputeMetaInfo. 
    :ivar current_node_count:
    :vartype current_node_count: int
    :ivar target_node_count:
    :vartype target_node_count: int
    :ivar max_node_count:
    :vartype max_node_count: int
    :ivar min_node_count:
    :vartype min_node_count: int
    :ivar idle_node_count:
    :vartype idle_node_count: int
    :ivar running_node_count:
    :vartype running_node_count: int
    :ivar preparing_node_count:
    :vartype preparing_node_count: int
    :ivar unusable_node_count:
    :vartype unusable_node_count: int
    :ivar leaving_node_count:
    :vartype leaving_node_count: int
    :ivar preempted_node_count:
    :vartype preempted_node_count: int
    :ivar vm_size:
    :vartype vm_size: str
    :ivar location:
    :vartype location: str
    :ivar provisioning_state:
    :vartype provisioning_state: str
    :ivar state:
    :vartype state: str
    :ivar os_type:
    :vartype os_type: str
    :ivar id:
    :vartype id: str
    :ivar name:
    :vartype name: str
    :ivar created_by_studio:
    :vartype created_by_studio: bool
    :ivar is_gpu_type:
    :vartype is_gpu_type: bool
    :ivar resource_id:
    :vartype resource_id: str
    :ivar compute_type:
    :vartype compute_type: str
    """

    # Maps Python attribute -> wire 'key' and msrest serialization 'type'.
    _attribute_map = {
        'current_node_count': {'key': 'currentNodeCount', 'type': 'int'},
        'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
        'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
        'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
        'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
        'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
        'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
        'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
        'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
        'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'state': {'key': 'state', 'type': 'str'},
        'os_type': {'key': 'osType', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'created_by_studio': {'key': 'createdByStudio', 'type': 'bool'},
        'is_gpu_type': {'key': 'isGpuType', 'type': 'bool'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        current_node_count: Optional[int] = None,
        target_node_count: Optional[int] = None,
        max_node_count: Optional[int] = None,
        min_node_count: Optional[int] = None,
        idle_node_count: Optional[int] = None,
        running_node_count: Optional[int] = None,
        preparing_node_count: Optional[int] = None,
        unusable_node_count: Optional[int] = None,
        leaving_node_count: Optional[int] = None,
        preempted_node_count: Optional[int] = None,
        vm_size: Optional[str] = None,
        location: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        state: Optional[str] = None,
        os_type: Optional[str] = None,
        id: Optional[str] = None,
        name: Optional[str] = None,
        created_by_studio: Optional[bool] = None,
        is_gpu_type: Optional[bool] = None,
        resource_id: Optional[str] = None,
        compute_type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword current_node_count:
        :paramtype current_node_count: int
        :keyword target_node_count:
        :paramtype target_node_count: int
        :keyword max_node_count:
        :paramtype max_node_count: int
        :keyword min_node_count:
        :paramtype min_node_count: int
        :keyword idle_node_count:
        :paramtype idle_node_count: int
        :keyword running_node_count:
        :paramtype running_node_count: int
        :keyword preparing_node_count:
        :paramtype preparing_node_count: int
        :keyword unusable_node_count:
        :paramtype unusable_node_count: int
        :keyword leaving_node_count:
        :paramtype leaving_node_count: int
        :keyword preempted_node_count:
        :paramtype preempted_node_count: int
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword location:
        :paramtype location: str
        :keyword provisioning_state:
        :paramtype provisioning_state: str
        :keyword state:
        :paramtype state: str
        :keyword os_type:
        :paramtype os_type: str
        :keyword id:
        :paramtype id: str
        :keyword name:
        :paramtype name: str
        :keyword created_by_studio:
        :paramtype created_by_studio: bool
        :keyword is_gpu_type:
        :paramtype is_gpu_type: bool
        :keyword resource_id:
        :paramtype resource_id: str
        :keyword compute_type:
        :paramtype compute_type: str
        """
        super(ExperimentComputeMetaInfo, self).__init__(**kwargs)
        self.current_node_count = current_node_count
        self.target_node_count = target_node_count
        self.max_node_count = max_node_count
        self.min_node_count = min_node_count
        self.idle_node_count = idle_node_count
        self.running_node_count = running_node_count
        self.preparing_node_count = preparing_node_count
        self.unusable_node_count = unusable_node_count
        self.leaving_node_count = leaving_node_count
        self.preempted_node_count = preempted_node_count
        self.vm_size = vm_size
        self.location = location
        self.provisioning_state = provisioning_state
        self.state = state
        self.os_type = os_type
        self.id = id
        self.name = name
        self.created_by_studio = created_by_studio
        self.is_gpu_type = is_gpu_type
        self.resource_id = resource_id
        self.compute_type = compute_type


class ExperimentInfo(msrest.serialization.Model):
    """ExperimentInfo.

    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar experiment_id:
    :vartype experiment_id: str
    """

    _attribute_map = {
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        experiment_name: Optional[str] = None,
        experiment_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword experiment_id:
        :paramtype experiment_id: str
        """
        super(ExperimentInfo, self).__init__(**kwargs)
        self.experiment_name = experiment_name
        self.experiment_id = experiment_id


class ExportComponentMetaInfo(msrest.serialization.Model):
    """ExportComponentMetaInfo.

    :ivar module_entity:
    :vartype module_entity: ~flow.models.ModuleEntity
    :ivar module_version:
    :vartype module_version: str
    :ivar is_anonymous:
    :vartype is_anonymous: bool
    """

    _attribute_map = {
        'module_entity': {'key': 'moduleEntity', 'type': 'ModuleEntity'},
        'module_version': {'key': 'moduleVersion', 'type': 'str'},
        'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        module_entity: Optional["ModuleEntity"] = None,
        module_version: Optional[str] = None,
        is_anonymous: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword module_entity:
        :paramtype module_entity: ~flow.models.ModuleEntity
        :keyword module_version:
        :paramtype module_version: str
        :keyword is_anonymous:
        :paramtype is_anonymous: bool
        """
        super(ExportComponentMetaInfo, self).__init__(**kwargs)
        self.module_entity = module_entity
        self.module_version = module_version
        self.is_anonymous = is_anonymous


class ExportDataTask(msrest.serialization.Model):
    """ExportDataTask.

    :ivar data_transfer_sink:
    :vartype data_transfer_sink: ~flow.models.DataTransferSink
    """

    _attribute_map = {
        'data_transfer_sink': {'key': 'DataTransferSink', 'type': 'DataTransferSink'},
    }

    def __init__(
        self,
        *,
        data_transfer_sink: Optional["DataTransferSink"] = None,
        **kwargs
    ):
        """
        :keyword data_transfer_sink:
        :paramtype data_transfer_sink: ~flow.models.DataTransferSink
        """
        super(ExportDataTask, self).__init__(**kwargs)
        self.data_transfer_sink = data_transfer_sink


class FeaturizationSettings(msrest.serialization.Model):
    """FeaturizationSettings.

    :ivar mode: Possible values include: "Auto", "Custom", "Off".
    :vartype mode: str or ~flow.models.FeaturizationMode
    :ivar blocked_transformers:
    :vartype blocked_transformers: list[str]
    :ivar column_purposes: Dictionary of :code:`<string>`.
    :vartype column_purposes: dict[str, str]
    :ivar drop_columns:
    :vartype drop_columns: list[str]
    :ivar transformer_params: Dictionary of <components·1gi3krm·schemas·featurizationsettings·properties·transformerparams·additionalproperties>.
    :vartype transformer_params: dict[str, list[~flow.models.ColumnTransformer]]
    :ivar dataset_language:
    :vartype dataset_language: str
    :ivar enable_dnn_featurization:
    :vartype enable_dnn_featurization: bool
    """

    # Maps Python attribute -> wire 'key' and msrest serialization 'type'.
    _attribute_map = {
        'mode': {'key': 'mode', 'type': 'str'},
        'blocked_transformers': {'key': 'blockedTransformers', 'type': '[str]'},
        'column_purposes': {'key': 'columnPurposes', 'type': '{str}'},
        'drop_columns': {'key': 'dropColumns', 'type': '[str]'},
        'transformer_params': {'key': 'transformerParams', 'type': '{[ColumnTransformer]}'},
        'dataset_language': {'key': 'datasetLanguage', 'type': 'str'},
        'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        mode: Optional[Union[str, "FeaturizationMode"]] = None,
        blocked_transformers: Optional[List[str]] = None,
        column_purposes: Optional[Dict[str, str]] = None,
        drop_columns: Optional[List[str]] = None,
        transformer_params: Optional[Dict[str, List["ColumnTransformer"]]] = None,
        dataset_language: Optional[str] = None,
        enable_dnn_featurization: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword mode: Possible values include: "Auto", "Custom", "Off".
        :paramtype mode: str or ~flow.models.FeaturizationMode
        :keyword blocked_transformers:
        :paramtype blocked_transformers: list[str]
        :keyword column_purposes: Dictionary of :code:`<string>`.
        :paramtype column_purposes: dict[str, str]
        :keyword drop_columns:
        :paramtype drop_columns: list[str]
        :keyword transformer_params: Dictionary of <components·1gi3krm·schemas·featurizationsettings·properties·transformerparams·additionalproperties>.
        :paramtype transformer_params: dict[str, list[~flow.models.ColumnTransformer]]
        :keyword dataset_language:
        :paramtype dataset_language: str
        :keyword enable_dnn_featurization:
        :paramtype enable_dnn_featurization: bool
        """
        super(FeaturizationSettings, self).__init__(**kwargs)
        self.mode = mode
        self.blocked_transformers = blocked_transformers
        self.column_purposes = column_purposes
        self.drop_columns = drop_columns
        self.transformer_params = transformer_params
        self.dataset_language = dataset_language
        self.enable_dnn_featurization = enable_dnn_featurization


class FeedDto(msrest.serialization.Model):
    """FeedDto.

    :ivar name:
    :vartype name: str
    :ivar display_name:
    :vartype display_name: str
    :ivar description:
    :vartype description: str
    :ivar sharing_scopes:
    :vartype sharing_scopes: list[~flow.models.SharingScope]
    :ivar supported_asset_types:
    :vartype supported_asset_types: ~flow.models.FeedDtoSupportedAssetTypes
    :ivar regional_workspace_storage: This is a dictionary.
    :vartype regional_workspace_storage: dict[str, list[str]]
    :ivar intellectual_property_publisher:
    :vartype intellectual_property_publisher: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'sharing_scopes': {'key': 'sharingScopes', 'type': '[SharingScope]'},
        'supported_asset_types': {'key': 'supportedAssetTypes', 'type': 'FeedDtoSupportedAssetTypes'},
        'regional_workspace_storage': {'key': 'regionalWorkspaceStorage', 'type': '{[str]}'},
        'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        display_name: Optional[str] = None,
        description: Optional[str] = None,
        sharing_scopes: Optional[List["SharingScope"]] = None,
        supported_asset_types: Optional["FeedDtoSupportedAssetTypes"] = None,
        regional_workspace_storage: Optional[Dict[str, List[str]]] = None,
        intellectual_property_publisher: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword description:
        :paramtype description: str
        :keyword sharing_scopes:
        :paramtype sharing_scopes: list[~flow.models.SharingScope]
        :keyword supported_asset_types:
        :paramtype supported_asset_types: ~flow.models.FeedDtoSupportedAssetTypes
        :keyword regional_workspace_storage: This is a dictionary.
        :paramtype regional_workspace_storage: dict[str, list[str]]
        :keyword intellectual_property_publisher:
        :paramtype intellectual_property_publisher: str
        """
        super(FeedDto, self).__init__(**kwargs)
        self.name = name
        self.display_name = display_name
        self.description = description
        self.sharing_scopes = sharing_scopes
        self.supported_asset_types = supported_asset_types
        self.regional_workspace_storage = regional_workspace_storage
        self.intellectual_property_publisher = intellectual_property_publisher


class FeedDtoSupportedAssetTypes(msrest.serialization.Model):
    """FeedDtoSupportedAssetTypes.

    :ivar component:
    :vartype component: ~flow.models.AssetTypeMetaInfo
    :ivar model:
    :vartype model: ~flow.models.AssetTypeMetaInfo
    :ivar environment:
    :vartype environment: ~flow.models.AssetTypeMetaInfo
    :ivar dataset:
    :vartype dataset: ~flow.models.AssetTypeMetaInfo
    :ivar data_store:
    :vartype data_store: ~flow.models.AssetTypeMetaInfo
    :ivar sample_graph:
    :vartype sample_graph: ~flow.models.AssetTypeMetaInfo
    :ivar flow_tool:
    :vartype flow_tool: ~flow.models.AssetTypeMetaInfo
    :ivar flow_tool_setting:
    :vartype flow_tool_setting: ~flow.models.AssetTypeMetaInfo
    :ivar flow_connection:
    :vartype flow_connection: ~flow.models.AssetTypeMetaInfo
    :ivar flow_sample:
    :vartype flow_sample: ~flow.models.AssetTypeMetaInfo
    :ivar flow_runtime_spec:
    :vartype flow_runtime_spec: ~flow.models.AssetTypeMetaInfo
    """

    _attribute_map = {
        'component': {'key': 'Component', 'type': 'AssetTypeMetaInfo'},
        'model': {'key': 'Model', 'type': 'AssetTypeMetaInfo'},
        'environment': {'key': 'Environment', 'type': 'AssetTypeMetaInfo'},
        'dataset': {'key': 'Dataset', 'type': 'AssetTypeMetaInfo'},
        'data_store': {'key': 'DataStore', 'type': 'AssetTypeMetaInfo'},
        'sample_graph': {'key': 'SampleGraph', 'type': 'AssetTypeMetaInfo'},
        'flow_tool': {'key': 'FlowTool', 'type': 'AssetTypeMetaInfo'},
        'flow_tool_setting': {'key': 'FlowToolSetting', 'type': 'AssetTypeMetaInfo'},
        'flow_connection': {'key': 'FlowConnection', 'type': 'AssetTypeMetaInfo'},
        'flow_sample': {'key': 'FlowSample', 'type': 'AssetTypeMetaInfo'},
        'flow_runtime_spec': {'key': 'FlowRuntimeSpec', 'type': 'AssetTypeMetaInfo'},
    }

    def __init__(
        self,
        *,
        component: Optional["AssetTypeMetaInfo"] = None,
        model: Optional["AssetTypeMetaInfo"] = None,
        environment: Optional["AssetTypeMetaInfo"] = None,
        dataset: Optional["AssetTypeMetaInfo"] = None,
        data_store: Optional["AssetTypeMetaInfo"] = None,
        sample_graph: Optional["AssetTypeMetaInfo"] = None,
        flow_tool: Optional["AssetTypeMetaInfo"] = None,
        flow_tool_setting: Optional["AssetTypeMetaInfo"] = None,
        flow_connection: Optional["AssetTypeMetaInfo"] = None,
        flow_sample: Optional["AssetTypeMetaInfo"] = None,
        flow_runtime_spec: Optional["AssetTypeMetaInfo"] = None,
        **kwargs
    ):
        """
        :keyword component:
        :paramtype component: ~flow.models.AssetTypeMetaInfo
        :keyword model:
        :paramtype model: ~flow.models.AssetTypeMetaInfo
        :keyword environment:
        :paramtype environment: ~flow.models.AssetTypeMetaInfo
        :keyword dataset:
        :paramtype dataset: ~flow.models.AssetTypeMetaInfo
        :keyword data_store:
        :paramtype data_store: ~flow.models.AssetTypeMetaInfo
        :keyword sample_graph:
        :paramtype sample_graph: ~flow.models.AssetTypeMetaInfo
        :keyword flow_tool:
        :paramtype flow_tool: ~flow.models.AssetTypeMetaInfo
        :keyword flow_tool_setting:
        :paramtype flow_tool_setting: ~flow.models.AssetTypeMetaInfo
        :keyword flow_connection:
        :paramtype flow_connection: ~flow.models.AssetTypeMetaInfo
        :keyword flow_sample:
        :paramtype flow_sample: ~flow.models.AssetTypeMetaInfo
        :keyword flow_runtime_spec:
        :paramtype flow_runtime_spec: ~flow.models.AssetTypeMetaInfo
        """
        super(FeedDtoSupportedAssetTypes, self).__init__(**kwargs)
        self.component = component
        self.model = model
        self.environment = environment
        self.dataset = dataset
        self.data_store = data_store
        self.sample_graph = sample_graph
        self.flow_tool = flow_tool
        self.flow_tool_setting = flow_tool_setting
        self.flow_connection = flow_connection
        self.flow_sample = flow_sample
        self.flow_runtime_spec = flow_runtime_spec


class FileSystem(msrest.serialization.Model):
    """FileSystem.

    :ivar connection:
    :vartype connection: str
    :ivar path:
    :vartype path: str
    """

    _attribute_map = {
        'connection': {'key': 'connection', 'type': 'str'},
        'path': {'key': 'path', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        connection: Optional[str] = None,
        path: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword connection:
        :paramtype connection: str
        :keyword path:
        :paramtype path: str
        """
        super(FileSystem, self).__init__(**kwargs)
        self.connection = connection
        self.path = path


class Flow(msrest.serialization.Model):
    """Flow.

    :ivar source_resource_id:
    :vartype source_resource_id: str
    :ivar flow_graph:
    :vartype flow_graph: ~flow.models.FlowGraph
    :ivar node_variants: This is a dictionary.
    :vartype node_variants: dict[str, ~flow.models.NodeVariant]
    :ivar flow_graph_layout:
    :vartype flow_graph_layout: ~flow.models.FlowGraphLayout
    :ivar bulk_test_data: This is a dictionary.
    :vartype bulk_test_data: dict[str, str]
    :ivar evaluation_flows: This is a dictionary.
    :vartype evaluation_flows: dict[str, ~flow.models.FlowGraphReference]
    """

    # Maps Python attribute -> wire 'key' and msrest serialization 'type'.
    _attribute_map = {
        'source_resource_id': {'key': 'sourceResourceId', 'type': 'str'},
        'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
        'node_variants': {'key': 'nodeVariants', 'type': '{NodeVariant}'},
        'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'},
        'bulk_test_data': {'key': 'bulkTestData', 'type': '{str}'},
        'evaluation_flows': {'key': 'evaluationFlows', 'type': '{FlowGraphReference}'},
    }

    def __init__(
        self,
        *,
        source_resource_id: Optional[str] = None,
        flow_graph: Optional["FlowGraph"] = None,
        node_variants: Optional[Dict[str, "NodeVariant"]] = None,
        flow_graph_layout: Optional["FlowGraphLayout"] = None,
        bulk_test_data: Optional[Dict[str, str]] = None,
        evaluation_flows: Optional[Dict[str, "FlowGraphReference"]] = None,
        **kwargs
    ):
        """
        :keyword source_resource_id:
        :paramtype source_resource_id: str
        :keyword flow_graph:
        :paramtype flow_graph: ~flow.models.FlowGraph
        :keyword node_variants: This is a dictionary.
        :paramtype node_variants: dict[str, ~flow.models.NodeVariant]
        :keyword flow_graph_layout:
        :paramtype flow_graph_layout: ~flow.models.FlowGraphLayout
        :keyword bulk_test_data: This is a dictionary.
        :paramtype bulk_test_data: dict[str, str]
        :keyword evaluation_flows: This is a dictionary.
        :paramtype evaluation_flows: dict[str, ~flow.models.FlowGraphReference]
        """
        super(Flow, self).__init__(**kwargs)
        self.source_resource_id = source_resource_id
        self.flow_graph = flow_graph
        self.node_variants = node_variants
        self.flow_graph_layout = flow_graph_layout
        self.bulk_test_data = bulk_test_data
        self.evaluation_flows = evaluation_flows


class FlowAnnotations(msrest.serialization.Model):
    """FlowAnnotations.

    :ivar flow_name:
    :vartype flow_name: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    :ivar owner:
    :vartype owner: ~flow.models.SchemaContractsCreatedBy
    :ivar is_archived:
    :vartype is_archived: bool
    :ivar vm_size:
    :vartype vm_size: str
    :ivar max_idle_time_seconds:
    :vartype max_idle_time_seconds: long
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar archived:
    :vartype archived: bool
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    """

    _attribute_map = {
        'flow_name': {'key': 'flowName', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
        'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
        'is_archived': {'key': 'isArchived', 'type': 'bool'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'archived': {'key': 'archived', 'type': 'bool'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        flow_name: Optional[str] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        owner: Optional["SchemaContractsCreatedBy"] = None,
        is_archived: Optional[bool] = None,
        vm_size: Optional[str] = None,
        max_idle_time_seconds: Optional[int] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        archived: Optional[bool] = None,
        tags: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword flow_name:
        :paramtype flow_name: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        :keyword owner:
        :paramtype owner: ~flow.models.SchemaContractsCreatedBy
        :keyword is_archived:
        :paramtype is_archived: bool
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword max_idle_time_seconds:
        :paramtype max_idle_time_seconds: long
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword archived:
        :paramtype archived: bool
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        """
        super(FlowAnnotations, self).__init__(**kwargs)
        self.flow_name = flow_name
        self.created_date = created_date
        self.last_modified_date = last_modified_date
        self.owner = owner
        self.is_archived = is_archived
        self.vm_size = vm_size
        self.max_idle_time_seconds = max_idle_time_seconds
        self.name = name
        self.description = description
        self.archived = archived
        self.tags = tags


class FlowBaseDto(msrest.serialization.Model):
    """FlowBaseDto.

    :ivar flow_id:
    :vartype flow_id: str
    :ivar flow_name:
    :vartype flow_name: str
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
    :vartype flow_type: str or ~flow.models.FlowType
    :ivar experiment_id:
    :vartype experiment_id: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    :ivar owner:
    :vartype owner: ~flow.models.SchemaContractsCreatedBy
    :ivar flow_resource_id:
    :vartype flow_resource_id: str
    :ivar is_archived:
    :vartype is_archived: bool
    :ivar flow_definition_file_path:
    :vartype flow_definition_file_path: str
    :ivar vm_size:
    :vartype vm_size: str
    :ivar max_idle_time_seconds:
    :vartype max_idle_time_seconds: long
    :ivar identity:
    :vartype identity: str
    """

    _attribute_map = {
        'flow_id': {'key': 'flowId', 'type': 'str'},
        'flow_name': {'key': 'flowName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'flow_type': {'key': 'flowType', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
        'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
        'flow_resource_id': {'key': 'flowResourceId', 'type': 'str'},
        'is_archived': {'key': 'isArchived', 'type': 'bool'},
        'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
        'identity': {'key': 'identity', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        flow_id: Optional[str] = None,
        flow_name: Optional[str] = None,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        flow_type: Optional[Union[str, "FlowType"]] = None,
        experiment_id: Optional[str] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        owner: Optional["SchemaContractsCreatedBy"] = None,
        flow_resource_id: Optional[str] = None,
        is_archived: Optional[bool] = None,
        flow_definition_file_path: Optional[str] = None,
        vm_size: Optional[str] = None,
        max_idle_time_seconds: Optional[int] = None,
        identity: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword flow_id:
        :paramtype flow_id: str
        :keyword flow_name:
        :paramtype flow_name: str
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
        :paramtype flow_type: str or ~flow.models.FlowType
        :keyword experiment_id:
        :paramtype experiment_id: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        :keyword owner:
        :paramtype owner: ~flow.models.SchemaContractsCreatedBy
        :keyword flow_resource_id:
        :paramtype flow_resource_id: str
        :keyword is_archived:
        :paramtype is_archived: bool
        :keyword flow_definition_file_path:
        :paramtype flow_definition_file_path: str
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword max_idle_time_seconds:
        :paramtype max_idle_time_seconds: long
        :keyword identity:
        :paramtype identity: str
        """
        super(FlowBaseDto, self).__init__(**kwargs)
        self.flow_id = flow_id
        self.flow_name = flow_name
        self.description = description
        self.tags = tags
        self.flow_type = flow_type
        self.experiment_id = experiment_id
        self.created_date = created_date
        self.last_modified_date = last_modified_date
        self.owner = owner
        self.flow_resource_id = flow_resource_id
        self.is_archived = is_archived
        self.flow_definition_file_path = flow_definition_file_path
        self.vm_size = vm_size
        self.max_idle_time_seconds = max_idle_time_seconds
        self.identity = identity


class FlowDto(msrest.serialization.Model):
    """FlowDto.

    :ivar timestamp:
    :vartype timestamp: ~datetime.datetime
    :ivar e_tag: Any object.
:vartype e_tag: any :ivar flow: :vartype flow: ~flow.models.Flow :ivar flow_run_settings: :vartype flow_run_settings: ~flow.models.FlowRunSettings :ivar flow_run_result: :vartype flow_run_result: ~flow.models.FlowRunResult :ivar flow_test_mode: Possible values include: "Sync", "Async". :vartype flow_test_mode: str or ~flow.models.FlowTestMode :ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`. :vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo] :ivar studio_portal_endpoint: :vartype studio_portal_endpoint: str :ivar flow_id: :vartype flow_id: str :ivar flow_name: :vartype flow_name: str :ivar description: :vartype description: str :ivar tags: A set of tags. Dictionary of :code:`<string>`. :vartype tags: dict[str, str] :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :vartype flow_type: str or ~flow.models.FlowType :ivar experiment_id: :vartype experiment_id: str :ivar created_date: :vartype created_date: ~datetime.datetime :ivar last_modified_date: :vartype last_modified_date: ~datetime.datetime :ivar owner: :vartype owner: ~flow.models.SchemaContractsCreatedBy :ivar flow_resource_id: :vartype flow_resource_id: str :ivar is_archived: :vartype is_archived: bool :ivar flow_definition_file_path: :vartype flow_definition_file_path: str :ivar vm_size: :vartype vm_size: str :ivar max_idle_time_seconds: :vartype max_idle_time_seconds: long :ivar identity: :vartype identity: str """ _attribute_map = { 'timestamp': {'key': 'timestamp', 'type': 'iso-8601'}, 'e_tag': {'key': 'eTag', 'type': 'object'}, 'flow': {'key': 'flow', 'type': 'Flow'}, 'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'}, 'flow_run_result': {'key': 'flowRunResult', 'type': 'FlowRunResult'}, 'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'}, 'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'}, 'studio_portal_endpoint': {'key': 'studioPortalEndpoint', 'type': 'str'}, 'flow_id': {'key': 'flowId', 'type': 
'str'}, 'flow_name': {'key': 'flowName', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'flow_type': {'key': 'flowType', 'type': 'str'}, 'experiment_id': {'key': 'experimentId', 'type': 'str'}, 'created_date': {'key': 'createdDate', 'type': 'iso-8601'}, 'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'}, 'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'}, 'flow_resource_id': {'key': 'flowResourceId', 'type': 'str'}, 'is_archived': {'key': 'isArchived', 'type': 'bool'}, 'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'}, 'identity': {'key': 'identity', 'type': 'str'}, } def __init__( self, *, timestamp: Optional[datetime.datetime] = None, e_tag: Optional[Any] = None, flow: Optional["Flow"] = None, flow_run_settings: Optional["FlowRunSettings"] = None, flow_run_result: Optional["FlowRunResult"] = None, flow_test_mode: Optional[Union[str, "FlowTestMode"]] = None, flow_test_infos: Optional[Dict[str, "FlowTestInfo"]] = None, studio_portal_endpoint: Optional[str] = None, flow_id: Optional[str] = None, flow_name: Optional[str] = None, description: Optional[str] = None, tags: Optional[Dict[str, str]] = None, flow_type: Optional[Union[str, "FlowType"]] = None, experiment_id: Optional[str] = None, created_date: Optional[datetime.datetime] = None, last_modified_date: Optional[datetime.datetime] = None, owner: Optional["SchemaContractsCreatedBy"] = None, flow_resource_id: Optional[str] = None, is_archived: Optional[bool] = None, flow_definition_file_path: Optional[str] = None, vm_size: Optional[str] = None, max_idle_time_seconds: Optional[int] = None, identity: Optional[str] = None, **kwargs ): """ :keyword timestamp: :paramtype timestamp: ~datetime.datetime :keyword e_tag: Any object. 
:paramtype e_tag: any :keyword flow: :paramtype flow: ~flow.models.Flow :keyword flow_run_settings: :paramtype flow_run_settings: ~flow.models.FlowRunSettings :keyword flow_run_result: :paramtype flow_run_result: ~flow.models.FlowRunResult :keyword flow_test_mode: Possible values include: "Sync", "Async". :paramtype flow_test_mode: str or ~flow.models.FlowTestMode :keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`. :paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo] :keyword studio_portal_endpoint: :paramtype studio_portal_endpoint: str :keyword flow_id: :paramtype flow_id: str :keyword flow_name: :paramtype flow_name: str :keyword description: :paramtype description: str :keyword tags: A set of tags. Dictionary of :code:`<string>`. :paramtype tags: dict[str, str] :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :paramtype flow_type: str or ~flow.models.FlowType :keyword experiment_id: :paramtype experiment_id: str :keyword created_date: :paramtype created_date: ~datetime.datetime :keyword last_modified_date: :paramtype last_modified_date: ~datetime.datetime :keyword owner: :paramtype owner: ~flow.models.SchemaContractsCreatedBy :keyword flow_resource_id: :paramtype flow_resource_id: str :keyword is_archived: :paramtype is_archived: bool :keyword flow_definition_file_path: :paramtype flow_definition_file_path: str :keyword vm_size: :paramtype vm_size: str :keyword max_idle_time_seconds: :paramtype max_idle_time_seconds: long :keyword identity: :paramtype identity: str """ super(FlowDto, self).__init__(**kwargs) self.timestamp = timestamp self.e_tag = e_tag self.flow = flow self.flow_run_settings = flow_run_settings self.flow_run_result = flow_run_result self.flow_test_mode = flow_test_mode self.flow_test_infos = flow_test_infos self.studio_portal_endpoint = studio_portal_endpoint self.flow_id = flow_id self.flow_name = flow_name self.description = description self.tags = tags self.flow_type = flow_type 
        self.experiment_id = experiment_id
        self.created_date = created_date
        self.last_modified_date = last_modified_date
        self.owner = owner
        self.flow_resource_id = flow_resource_id
        self.is_archived = is_archived
        self.flow_definition_file_path = flow_definition_file_path
        self.vm_size = vm_size
        self.max_idle_time_seconds = max_idle_time_seconds
        self.identity = identity


class FlowEnvironment(msrest.serialization.Model):
    """FlowEnvironment.

    :ivar image:
    :vartype image: str
    :ivar python_requirements_txt:
    :vartype python_requirements_txt: str
    """

    # NOTE(review): wire keys here are snake_case and identical to the Python
    # attribute names, unlike the camelCase keys used by most models in this file.
    _attribute_map = {
        'image': {'key': 'image', 'type': 'str'},
        'python_requirements_txt': {'key': 'python_requirements_txt', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        image: Optional[str] = None,
        python_requirements_txt: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword image:
        :paramtype image: str
        :keyword python_requirements_txt:
        :paramtype python_requirements_txt: str
        """
        super(FlowEnvironment, self).__init__(**kwargs)
        self.image = image
        self.python_requirements_txt = python_requirements_txt


# Describes one feature flag exposed by the service, with a nested
# per-component readiness state (see FlowFeatureState below).
class FlowFeature(msrest.serialization.Model):
    """FlowFeature.

    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar state:
    :vartype state: ~flow.models.FlowFeatureState
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'state': {'key': 'state', 'type': 'FlowFeatureState'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        description: Optional[str] = None,
        state: Optional["FlowFeatureState"] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword state:
        :paramtype state: ~flow.models.FlowFeatureState
        """
        super(FlowFeature, self).__init__(**kwargs)
        self.name = name
        self.description = description
        self.state = state


# Per-component readiness of a feature: runtime, executor and PFS each carry a
# FlowFeatureStateEnum value ("Ready" / "E2ETest").
class FlowFeatureState(msrest.serialization.Model):
    """FlowFeatureState.

    :ivar runtime: Possible values include: "Ready", "E2ETest".
    :vartype runtime: str or ~flow.models.FlowFeatureStateEnum
    :ivar executor: Possible values include: "Ready", "E2ETest".
    :vartype executor: str or ~flow.models.FlowFeatureStateEnum
    :ivar pfs: Possible values include: "Ready", "E2ETest".
    :vartype pfs: str or ~flow.models.FlowFeatureStateEnum
    """

    # NOTE(review): wire keys are cased 'Runtime'/'Executor'/'PFS' on this model,
    # not the camelCase convention used elsewhere — keep as generated.
    _attribute_map = {
        'runtime': {'key': 'Runtime', 'type': 'str'},
        'executor': {'key': 'Executor', 'type': 'str'},
        'pfs': {'key': 'PFS', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        runtime: Optional[Union[str, "FlowFeatureStateEnum"]] = None,
        executor: Optional[Union[str, "FlowFeatureStateEnum"]] = None,
        pfs: Optional[Union[str, "FlowFeatureStateEnum"]] = None,
        **kwargs
    ):
        """
        :keyword runtime: Possible values include: "Ready", "E2ETest".
        :paramtype runtime: str or ~flow.models.FlowFeatureStateEnum
        :keyword executor: Possible values include: "Ready", "E2ETest".
        :paramtype executor: str or ~flow.models.FlowFeatureStateEnum
        :keyword pfs: Possible values include: "Ready", "E2ETest".
        :paramtype pfs: str or ~flow.models.FlowFeatureStateEnum
        """
        super(FlowFeatureState, self).__init__(**kwargs)
        self.runtime = runtime
        self.executor = executor
        self.pfs = pfs


# The flow's graph payload: node and tool lists plus string-keyed maps of code
# snippets and named input/output definitions.
class FlowGraph(msrest.serialization.Model):
    """FlowGraph.

    :ivar nodes:
    :vartype nodes: list[~flow.models.Node]
    :ivar tools:
    :vartype tools: list[~flow.models.Tool]
    :ivar codes: This is a dictionary.
    :vartype codes: dict[str, str]
    :ivar inputs: This is a dictionary.
    :vartype inputs: dict[str, ~flow.models.FlowInputDefinition]
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, ~flow.models.FlowOutputDefinition]
    """

    # msrest type-string notation: '[X]' is a list of model X, '{X}' is a
    # string-keyed dict of X (so '{str}' is dict[str, str]).
    _attribute_map = {
        'nodes': {'key': 'nodes', 'type': '[Node]'},
        'tools': {'key': 'tools', 'type': '[Tool]'},
        'codes': {'key': 'codes', 'type': '{str}'},
        'inputs': {'key': 'inputs', 'type': '{FlowInputDefinition}'},
        'outputs': {'key': 'outputs', 'type': '{FlowOutputDefinition}'},
    }

    def __init__(
        self,
        *,
        nodes: Optional[List["Node"]] = None,
        tools: Optional[List["Tool"]] = None,
        codes: Optional[Dict[str, str]] = None,
        inputs: Optional[Dict[str, "FlowInputDefinition"]] = None,
        outputs: Optional[Dict[str, "FlowOutputDefinition"]] = None,
        **kwargs
    ):
        """
        :keyword nodes:
        :paramtype nodes: list[~flow.models.Node]
        :keyword tools:
        :paramtype tools: list[~flow.models.Tool]
        :keyword codes: This is a dictionary.
        :paramtype codes: dict[str, str]
        :keyword inputs: This is a dictionary.
        :paramtype inputs: dict[str, ~flow.models.FlowInputDefinition]
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, ~flow.models.FlowOutputDefinition]
        """
        super(FlowGraph, self).__init__(**kwargs)
        self.nodes = nodes
        self.tools = tools
        self.codes = codes
        self.inputs = inputs
        self.outputs = outputs


# A free-text annotation placed on the graph, optionally referencing node
# names; carries both plain and structured content variants.
class FlowGraphAnnotationNode(msrest.serialization.Model):
    """FlowGraphAnnotationNode.

    :ivar id:
    :vartype id: str
    :ivar content:
    :vartype content: str
    :ivar mentioned_node_names:
    :vartype mentioned_node_names: list[str]
    :ivar structured_content:
    :vartype structured_content: str
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'content': {'key': 'content', 'type': 'str'},
        'mentioned_node_names': {'key': 'mentionedNodeNames', 'type': '[str]'},
        'structured_content': {'key': 'structuredContent', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        # 'id' shadows the builtin — it mirrors the REST field name; keep as generated.
        id: Optional[str] = None,
        content: Optional[str] = None,
        mentioned_node_names: Optional[List[str]] = None,
        structured_content: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword content:
        :paramtype content: str
        :keyword mentioned_node_names:
        :paramtype mentioned_node_names: list[str]
        :keyword structured_content:
        :paramtype structured_content: str
        """
        super(FlowGraphAnnotationNode, self).__init__(**kwargs)
        self.id = id
        self.content = content
        self.mentioned_node_names = mentioned_node_names
        self.structured_content = structured_content


# Visual layout of the graph: per-node positions (keyed by node name — TODO
# confirm the key semantics against the service), annotations, and orientation.
class FlowGraphLayout(msrest.serialization.Model):
    """FlowGraphLayout.

    :ivar node_layouts: This is a dictionary.
    :vartype node_layouts: dict[str, ~flow.models.FlowNodeLayout]
    :ivar extended_data:
    :vartype extended_data: str
    :ivar annotation_nodes:
    :vartype annotation_nodes: list[~flow.models.FlowGraphAnnotationNode]
    :ivar orientation: Possible values include: "Horizontal", "Vertical".
    :vartype orientation: str or ~flow.models.Orientation
    """

    _attribute_map = {
        'node_layouts': {'key': 'nodeLayouts', 'type': '{FlowNodeLayout}'},
        'extended_data': {'key': 'extendedData', 'type': 'str'},
        'annotation_nodes': {'key': 'annotationNodes', 'type': '[FlowGraphAnnotationNode]'},
        'orientation': {'key': 'orientation', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        node_layouts: Optional[Dict[str, "FlowNodeLayout"]] = None,
        extended_data: Optional[str] = None,
        annotation_nodes: Optional[List["FlowGraphAnnotationNode"]] = None,
        orientation: Optional[Union[str, "Orientation"]] = None,
        **kwargs
    ):
        """
        :keyword node_layouts: This is a dictionary.
        :paramtype node_layouts: dict[str, ~flow.models.FlowNodeLayout]
        :keyword extended_data:
        :paramtype extended_data: str
        :keyword annotation_nodes:
        :paramtype annotation_nodes: list[~flow.models.FlowGraphAnnotationNode]
        :keyword orientation: Possible values include: "Horizontal", "Vertical".
        :paramtype orientation: str or ~flow.models.Orientation
        """
        super(FlowGraphLayout, self).__init__(**kwargs)
        self.node_layouts = node_layouts
        self.extended_data = extended_data
        self.annotation_nodes = annotation_nodes
        self.orientation = orientation


# Pairs an inline FlowGraph with the resource id it was referenced from.
class FlowGraphReference(msrest.serialization.Model):
    """FlowGraphReference.

    :ivar flow_graph:
    :vartype flow_graph: ~flow.models.FlowGraph
    :ivar reference_resource_id:
    :vartype reference_resource_id: str
    """

    _attribute_map = {
        'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
        'reference_resource_id': {'key': 'referenceResourceId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        flow_graph: Optional["FlowGraph"] = None,
        reference_resource_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword flow_graph:
        :paramtype flow_graph: ~flow.models.FlowGraph
        :keyword reference_resource_id:
        :paramtype reference_resource_id: str
        """
        super(FlowGraphReference, self).__init__(**kwargs)
        self.flow_graph = flow_graph
        self.reference_resource_id = reference_resource_id


# Index-service view of a flow: annotations/properties plus versioning and
# relationship metadata. Several fields are server-populated read-only.
class FlowIndexEntity(msrest.serialization.Model):
    """FlowIndexEntity.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar schema_id:
    :vartype schema_id: str
    :ivar entity_id:
    :vartype entity_id: str
    :ivar kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned".
    :vartype kind: str or ~flow.models.EntityKind
    :ivar annotations:
    :vartype annotations: ~flow.models.FlowAnnotations
    :ivar properties:
    :vartype properties: ~flow.models.FlowProperties
    :ivar internal: Any object.
    :vartype internal: any
    :ivar update_sequence:
    :vartype update_sequence: long
    :ivar type:
    :vartype type: str
    :ivar version:
    :vartype version: str
    :ivar entity_container_id:
    :vartype entity_container_id: str
    :ivar entity_object_id:
    :vartype entity_object_id: str
    :ivar resource_type:
    :vartype resource_type: str
    :ivar relationships:
    :vartype relationships: list[~flow.models.Relationship]
    :ivar asset_id:
    :vartype asset_id: str
    """

    # These four fields are read-only: they are not accepted by __init__ below
    # and are only filled in by the server on deserialization.
    _validation = {
        'version': {'readonly': True},
        'entity_container_id': {'readonly': True},
        'entity_object_id': {'readonly': True},
        'resource_type': {'readonly': True},
    }

    _attribute_map = {
        'schema_id': {'key': 'schemaId', 'type': 'str'},
        'entity_id': {'key': 'entityId', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'annotations': {'key': 'annotations', 'type': 'FlowAnnotations'},
        'properties': {'key': 'properties', 'type': 'FlowProperties'},
        'internal': {'key': 'internal', 'type': 'object'},
        'update_sequence': {'key': 'updateSequence', 'type': 'long'},
        'type': {'key': 'type', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'entity_container_id': {'key': 'entityContainerId', 'type': 'str'},
        'entity_object_id': {'key': 'entityObjectId', 'type': 'str'},
        'resource_type': {'key': 'resourceType', 'type': 'str'},
        'relationships': {'key': 'relationships', 'type': '[Relationship]'},
        'asset_id': {'key': 'assetId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        schema_id: Optional[str] = None,
        entity_id: Optional[str] = None,
        kind: Optional[Union[str, "EntityKind"]] = None,
        annotations: Optional["FlowAnnotations"] = None,
        properties: Optional["FlowProperties"] = None,
        internal: Optional[Any] = None,
        update_sequence: Optional[int] = None,
        type: Optional[str] = None,
        relationships: Optional[List["Relationship"]] = None,
        asset_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword schema_id:
        :paramtype schema_id: str
        :keyword entity_id:
        :paramtype entity_id: str
        :keyword kind: Possible values include: "Invalid", "LineageRoot", "Versioned",
         "Unversioned".
        :paramtype kind: str or ~flow.models.EntityKind
        :keyword annotations:
        :paramtype annotations: ~flow.models.FlowAnnotations
        :keyword properties:
        :paramtype properties: ~flow.models.FlowProperties
        :keyword internal: Any object.
        :paramtype internal: any
        :keyword update_sequence:
        :paramtype update_sequence: long
        :keyword type:
        :paramtype type: str
        :keyword relationships:
        :paramtype relationships: list[~flow.models.Relationship]
        :keyword asset_id:
        :paramtype asset_id: str
        """
        super(FlowIndexEntity, self).__init__(**kwargs)
        self.schema_id = schema_id
        self.entity_id = entity_id
        self.kind = kind
        self.annotations = annotations
        self.properties = properties
        self.internal = internal
        self.update_sequence = update_sequence
        self.type = type
        # Read-only attributes start as None; the server supplies their values.
        self.version = None
        self.entity_container_id = None
        self.entity_object_id = None
        self.resource_type = None
        self.relationships = relationships
        self.asset_id = asset_id


# Declares one named input of a flow (its ValueType, optional default value,
# and chat-related flags).
class FlowInputDefinition(msrest.serialization.Model):
    """FlowInputDefinition.

    :ivar name:
    :vartype name: str
    :ivar type: Possible values include: "int", "double", "bool", "string", "secret",
     "prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
     "AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
     "AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
     "SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
     "function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
    :vartype type: str or ~flow.models.ValueType
    :ivar default: Anything.
    :vartype default: any
    :ivar description:
    :vartype description: str
    :ivar is_chat_input:
    :vartype is_chat_input: bool
    :ivar is_chat_history:
    :vartype is_chat_history: bool
    """

    # NOTE(review): wire keys are snake_case on this model (e.g. 'is_chat_input'),
    # matching the flow-definition payload rather than the camelCase REST models.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'default': {'key': 'default', 'type': 'object'},
        'description': {'key': 'description', 'type': 'str'},
        'is_chat_input': {'key': 'is_chat_input', 'type': 'bool'},
        'is_chat_history': {'key': 'is_chat_history', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        type: Optional[Union[str, "ValueType"]] = None,
        default: Optional[Any] = None,
        description: Optional[str] = None,
        is_chat_input: Optional[bool] = None,
        is_chat_history: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword type: Possible values include: "int", "double", "bool", "string", "secret",
         "prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
         "AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
         "AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
         "SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
         "function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
        :paramtype type: str or ~flow.models.ValueType
        :keyword default: Anything.
        :paramtype default: any
        :keyword description:
        :paramtype description: str
        :keyword is_chat_input:
        :paramtype is_chat_input: bool
        :keyword is_chat_history:
        :paramtype is_chat_history: bool
        """
        super(FlowInputDefinition, self).__init__(**kwargs)
        self.name = name
        self.type = type
        self.default = default
        self.description = description
        self.is_chat_input = is_chat_input
        self.is_chat_history = is_chat_history


# One node of a flow: tool type and source, free-form inputs, optional
# activation condition, and connection/provider metadata for LLM-style tools.
class FlowNode(msrest.serialization.Model):
    """FlowNode.

    :ivar name:
    :vartype name: str
    :ivar type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
     "csharp".
    :vartype type: str or ~flow.models.ToolType
    :ivar source:
    :vartype source: ~flow.models.NodeSource
    :ivar inputs: Dictionary of :code:`<any>`.
    :vartype inputs: dict[str, any]
    :ivar use_variants:
    :vartype use_variants: bool
    :ivar activate:
    :vartype activate: ~flow.models.Activate
    :ivar comment:
    :vartype comment: str
    :ivar api:
    :vartype api: str
    :ivar provider:
    :vartype provider: str
    :ivar connection:
    :vartype connection: str
    :ivar module:
    :vartype module: str
    :ivar aggregation:
    :vartype aggregation: bool
    """

    # Snake_case wire keys here too ('use_variants'), like FlowInputDefinition.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'source': {'key': 'source', 'type': 'NodeSource'},
        'inputs': {'key': 'inputs', 'type': '{object}'},
        'use_variants': {'key': 'use_variants', 'type': 'bool'},
        'activate': {'key': 'activate', 'type': 'Activate'},
        'comment': {'key': 'comment', 'type': 'str'},
        'api': {'key': 'api', 'type': 'str'},
        'provider': {'key': 'provider', 'type': 'str'},
        'connection': {'key': 'connection', 'type': 'str'},
        'module': {'key': 'module', 'type': 'str'},
        'aggregation': {'key': 'aggregation', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        type: Optional[Union[str, "ToolType"]] = None,
        source: Optional["NodeSource"] = None,
        inputs: Optional[Dict[str, Any]] = None,
        use_variants: Optional[bool] = None,
        activate: Optional["Activate"] = None,
        comment: Optional[str] = None,
        api: Optional[str] = None,
        provider: Optional[str] = None,
        connection: Optional[str] = None,
        module: Optional[str] = None,
        aggregation: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
         "csharp".
        :paramtype type: str or ~flow.models.ToolType
        :keyword source:
        :paramtype source: ~flow.models.NodeSource
        :keyword inputs: Dictionary of :code:`<any>`.
        :paramtype inputs: dict[str, any]
        :keyword use_variants:
        :paramtype use_variants: bool
        :keyword activate:
        :paramtype activate: ~flow.models.Activate
        :keyword comment:
        :paramtype comment: str
        :keyword api:
        :paramtype api: str
        :keyword provider:
        :paramtype provider: str
        :keyword connection:
        :paramtype connection: str
        :keyword module:
        :paramtype module: str
        :keyword aggregation:
        :paramtype aggregation: bool
        """
        super(FlowNode, self).__init__(**kwargs)
        self.name = name
        self.type = type
        self.source = source
        self.inputs = inputs
        self.use_variants = use_variants
        self.activate = activate
        self.comment = comment
        self.api = api
        self.provider = provider
        self.connection = connection
        self.module = module
        self.aggregation = aggregation


# Canvas geometry for a single node (position, size, z-order index) plus an
# opaque extendedData string.
class FlowNodeLayout(msrest.serialization.Model):
    """FlowNodeLayout.

    :ivar x:
    :vartype x: float
    :ivar y:
    :vartype y: float
    :ivar width:
    :vartype width: float
    :ivar height:
    :vartype height: float
    :ivar index:
    :vartype index: int
    :ivar extended_data:
    :vartype extended_data: str
    """

    _attribute_map = {
        'x': {'key': 'x', 'type': 'float'},
        'y': {'key': 'y', 'type': 'float'},
        'width': {'key': 'width', 'type': 'float'},
        'height': {'key': 'height', 'type': 'float'},
        'index': {'key': 'index', 'type': 'int'},
        'extended_data': {'key': 'extendedData', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        x: Optional[float] = None,
        y: Optional[float] = None,
        width: Optional[float] = None,
        height: Optional[float] = None,
        index: Optional[int] = None,
        extended_data: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword x:
        :paramtype x: float
        :keyword y:
        :paramtype y: float
        :keyword width:
        :paramtype width: float
        :keyword height:
        :paramtype height: float
        :keyword index:
        :paramtype index: int
        :keyword extended_data:
        :paramtype extended_data: str
        """
        super(FlowNodeLayout, self).__init__(**kwargs)
        self.x = x
        self.y = y
        self.width = width
        self.height = height
        self.index = index
        self.extended_data = extended_data


# Variant set for one node: a map of variant id -> FlowVariantNode and which
# variant is the default.
class FlowNodeVariant(msrest.serialization.Model):
    """FlowNodeVariant.

    :ivar default_variant_id:
    :vartype default_variant_id: str
    :ivar variants: This is a dictionary.
    :vartype variants: dict[str, ~flow.models.FlowVariantNode]
    """

    # Snake_case wire keys, consistent with the other flow-definition models.
    _attribute_map = {
        'default_variant_id': {'key': 'default_variant_id', 'type': 'str'},
        'variants': {'key': 'variants', 'type': '{FlowVariantNode}'},
    }

    def __init__(
        self,
        *,
        default_variant_id: Optional[str] = None,
        variants: Optional[Dict[str, "FlowVariantNode"]] = None,
        **kwargs
    ):
        """
        :keyword default_variant_id:
        :paramtype default_variant_id: str
        :keyword variants: This is a dictionary.
        :paramtype variants: dict[str, ~flow.models.FlowVariantNode]
        """
        super(FlowNodeVariant, self).__init__(**kwargs)
        self.default_variant_id = default_variant_id
        self.variants = variants


# Declares one named output of a flow: its ValueType, the node-output
# reference it is wired to, and chat/evaluation flags.
class FlowOutputDefinition(msrest.serialization.Model):
    """FlowOutputDefinition.

    :ivar name:
    :vartype name: str
    :ivar type: Possible values include: "int", "double", "bool", "string", "secret",
     "prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
     "AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
     "AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
     "SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
     "function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
    :vartype type: str or ~flow.models.ValueType
    :ivar description:
    :vartype description: str
    :ivar reference:
    :vartype reference: str
    :ivar evaluation_only:
    :vartype evaluation_only: bool
    :ivar is_chat_output:
    :vartype is_chat_output: bool
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'reference': {'key': 'reference', 'type': 'str'},
        'evaluation_only': {'key': 'evaluation_only', 'type': 'bool'},
        'is_chat_output': {'key': 'is_chat_output', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        type: Optional[Union[str, "ValueType"]] = None,
        description: Optional[str] = None,
        reference: Optional[str] = None,
        evaluation_only: Optional[bool] = None,
        is_chat_output: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword type: Possible values include: "int", "double", "bool", "string", "secret",
         "prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
         "AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
         "AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
         "SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
         "function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
        :paramtype type: str or ~flow.models.ValueType
        :keyword description:
        :paramtype description: str
        :keyword reference:
        :paramtype reference: str
        :keyword evaluation_only:
        :paramtype evaluation_only: bool
        :keyword is_chat_output:
        :paramtype is_chat_output: bool
        """
        super(FlowOutputDefinition, self).__init__(**kwargs)
        self.name = name
        self.type = type
        self.description = description
        self.reference = reference
        self.evaluation_only = evaluation_only
        self.is_chat_output = is_chat_output


# Index "properties" facet of a flow: ids, type, dag file path, and creation
# context.
class FlowProperties(msrest.serialization.Model):
    """FlowProperties.

    :ivar flow_id:
    :vartype flow_id: str
    :ivar experiment_id:
    :vartype experiment_id: str
    :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
    :vartype flow_type: str or ~flow.models.FlowType
    :ivar flow_definition_file_path:
    :vartype flow_definition_file_path: str
    :ivar creation_context:
    :vartype creation_context: ~flow.models.CreationContext
    """

    _attribute_map = {
        'flow_id': {'key': 'flowId', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'flow_type': {'key': 'flowType', 'type': 'str'},
        'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
        'creation_context': {'key': 'creationContext', 'type': 'CreationContext'},
    }

    def __init__(
        self,
        *,
        flow_id: Optional[str] = None,
        experiment_id: Optional[str] = None,
        flow_type: Optional[Union[str, "FlowType"]] = None,
        flow_definition_file_path: Optional[str] = None,
        creation_context: Optional["CreationContext"] = None,
        **kwargs
    ):
        """
        :keyword flow_id:
        :paramtype flow_id: str
        :keyword experiment_id:
        :paramtype experiment_id: str
        :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
        :paramtype flow_type: str or ~flow.models.FlowType
        :keyword flow_definition_file_path:
        :paramtype flow_definition_file_path: str
        :keyword creation_context:
        :paramtype creation_context: ~flow.models.CreationContext
        """
        super(FlowProperties, self).__init__(**kwargs)
        self.flow_id = flow_id
        self.experiment_id = experiment_id
        self.flow_type = flow_type
        self.flow_definition_file_path = flow_definition_file_path
        self.creation_context = creation_context


# Datastore name + base path pair locating a flow run's outputs.
class FlowRunBasePath(msrest.serialization.Model):
    """FlowRunBasePath.

    :ivar output_datastore_name:
    :vartype output_datastore_name: str
    :ivar base_path:
    :vartype base_path: str
    """

    _attribute_map = {
        'output_datastore_name': {'key': 'outputDatastoreName', 'type': 'str'},
        'base_path': {'key': 'basePath', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        output_datastore_name: Optional[str] = None,
        base_path: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword output_datastore_name:
        :paramtype output_datastore_name: str
        :keyword base_path:
        :paramtype base_path: str
        """
        super(FlowRunBasePath, self).__init__(**kwargs)
        self.output_datastore_name = output_datastore_name
        self.base_path = base_path


# Full metadata record for a single flow run: the graph/layout that ran, run
# identity and display info, batch input configuration, run/flow types,
# runtime/output locations, and the Studio portal endpoint for the run.
class FlowRunInfo(msrest.serialization.Model):
    """FlowRunInfo.

    :ivar flow_graph:
    :vartype flow_graph: ~flow.models.FlowGraph
    :ivar flow_graph_layout:
    :vartype flow_graph_layout: ~flow.models.FlowGraphLayout
    :ivar flow_name:
    :vartype flow_name: str
    :ivar flow_run_resource_id:
    :vartype flow_run_resource_id: str
    :ivar flow_run_id:
    :vartype flow_run_id: str
    :ivar flow_run_display_name:
    :vartype flow_run_display_name: str
    :ivar batch_inputs:
    :vartype batch_inputs: list[dict[str, any]]
    :ivar batch_data_input:
    :vartype batch_data_input: ~flow.models.BatchDataInput
    :ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
     "PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
    :vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum
    :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
    :vartype flow_type: str or ~flow.models.FlowType
    :ivar runtime_name:
    :vartype runtime_name: str
    :ivar bulk_test_id:
    :vartype bulk_test_id: str
    :ivar created_by:
    :vartype created_by: ~flow.models.SchemaContractsCreatedBy
    :ivar created_on:
    :vartype created_on: ~datetime.datetime
    :ivar inputs_mapping: This is a dictionary.
    :vartype inputs_mapping: dict[str, str]
    :ivar output_datastore_name:
    :vartype output_datastore_name: str
    :ivar child_run_base_path:
    :vartype child_run_base_path: str
    :ivar working_directory:
    :vartype working_directory: str
    :ivar flow_dag_file_relative_path:
    :vartype flow_dag_file_relative_path: str
    :ivar flow_snapshot_id:
    :vartype flow_snapshot_id: str
    :ivar studio_portal_endpoint:
    :vartype studio_portal_endpoint: str
    """

    # '[{object}]' = list of free-form dicts; 'iso-8601' deserializes to datetime.
    _attribute_map = {
        'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
        'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'},
        'flow_name': {'key': 'flowName', 'type': 'str'},
        'flow_run_resource_id': {'key': 'flowRunResourceId', 'type': 'str'},
        'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
        'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
        'batch_inputs': {'key': 'batchInputs', 'type': '[{object}]'},
        'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
        'flow_run_type': {'key': 'flowRunType', 'type': 'str'},
        'flow_type': {'key': 'flowType', 'type': 'str'},
        'runtime_name': {'key': 'runtimeName', 'type': 'str'},
        'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
        'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
        'output_datastore_name': {'key': 'outputDatastoreName', 'type': 'str'},
        'child_run_base_path': {'key': 'childRunBasePath', 'type': 'str'},
        'working_directory': {'key': 'workingDirectory', 'type': 'str'},
        'flow_dag_file_relative_path': {'key': 'flowDagFileRelativePath', 'type': 'str'},
        'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'},
        'studio_portal_endpoint': {'key': 'studioPortalEndpoint', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        flow_graph: Optional["FlowGraph"] = None,
        flow_graph_layout: Optional["FlowGraphLayout"] = None,
        flow_name: Optional[str] = None,
        flow_run_resource_id: Optional[str] = None,
        flow_run_id: Optional[str] = None,
        flow_run_display_name: Optional[str] = None,
        batch_inputs: Optional[List[Dict[str, Any]]] = None,
        batch_data_input: Optional["BatchDataInput"] = None,
        flow_run_type: Optional[Union[str, "FlowRunTypeEnum"]] = None,
        flow_type: Optional[Union[str, "FlowType"]] = None,
        runtime_name: Optional[str] = None,
        bulk_test_id: Optional[str] = None,
        created_by: Optional["SchemaContractsCreatedBy"] = None,
        created_on: Optional[datetime.datetime] = None,
        inputs_mapping: Optional[Dict[str, str]] = None,
        output_datastore_name: Optional[str] = None,
        child_run_base_path: Optional[str] = None,
        working_directory: Optional[str] = None,
        flow_dag_file_relative_path: Optional[str] = None,
        flow_snapshot_id: Optional[str] = None,
        studio_portal_endpoint: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword flow_graph:
        :paramtype flow_graph: ~flow.models.FlowGraph
        :keyword flow_graph_layout:
        :paramtype flow_graph_layout: ~flow.models.FlowGraphLayout
        :keyword flow_name:
        :paramtype flow_name: str
        :keyword flow_run_resource_id:
        :paramtype flow_run_resource_id: str
        :keyword flow_run_id:
        :paramtype flow_run_id: str
        :keyword flow_run_display_name:
        :paramtype flow_run_display_name: str
        :keyword batch_inputs:
        :paramtype batch_inputs: list[dict[str, any]]
        :keyword batch_data_input:
        :paramtype batch_data_input: ~flow.models.BatchDataInput
        :keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
         "PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
        :paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum
        :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
        :paramtype flow_type: str or ~flow.models.FlowType
        :keyword runtime_name:
        :paramtype runtime_name: str
        :keyword bulk_test_id:
        :paramtype bulk_test_id: str
        :keyword created_by:
        :paramtype created_by: ~flow.models.SchemaContractsCreatedBy
        :keyword created_on:
        :paramtype created_on: ~datetime.datetime
        :keyword inputs_mapping: This is a dictionary.
        :paramtype inputs_mapping: dict[str, str]
        :keyword output_datastore_name:
        :paramtype output_datastore_name: str
        :keyword child_run_base_path:
        :paramtype child_run_base_path: str
        :keyword working_directory:
        :paramtype working_directory: str
        :keyword flow_dag_file_relative_path:
        :paramtype flow_dag_file_relative_path: str
        :keyword flow_snapshot_id:
        :paramtype flow_snapshot_id: str
        :keyword studio_portal_endpoint:
        :paramtype studio_portal_endpoint: str
        """
        super(FlowRunInfo, self).__init__(**kwargs)
        self.flow_graph = flow_graph
        self.flow_graph_layout = flow_graph_layout
        self.flow_name = flow_name
        self.flow_run_resource_id = flow_run_resource_id
        self.flow_run_id = flow_run_id
        self.flow_run_display_name = flow_run_display_name
        self.batch_inputs = batch_inputs
        self.batch_data_input = batch_data_input
        self.flow_run_type = flow_run_type
        self.flow_type = flow_type
        self.runtime_name = runtime_name
        self.bulk_test_id = bulk_test_id
        self.created_by = created_by
        self.created_on = created_on
        self.inputs_mapping = inputs_mapping
        self.output_datastore_name = output_datastore_name
        self.child_run_base_path = child_run_base_path
        self.working_directory = working_directory
        self.flow_dag_file_relative_path = flow_dag_file_relative_path
        self.flow_snapshot_id = flow_snapshot_id
        self.studio_portal_endpoint = studio_portal_endpoint


# Result payload of a flow run; continues below this chunk.
class FlowRunResult(msrest.serialization.Model):
    """FlowRunResult.

    :ivar flow_runs:
    :vartype flow_runs: list[any]
    :ivar node_runs:
    :vartype node_runs: list[any]
    :ivar error_response: The error response.
:vartype error_response: ~flow.models.ErrorResponse :ivar flow_name: :vartype flow_name: str :ivar flow_run_display_name: :vartype flow_run_display_name: str :ivar flow_run_id: :vartype flow_run_id: str :ivar flow_graph: :vartype flow_graph: ~flow.models.FlowGraph :ivar flow_graph_layout: :vartype flow_graph_layout: ~flow.models.FlowGraphLayout :ivar flow_run_resource_id: :vartype flow_run_resource_id: str :ivar bulk_test_id: :vartype bulk_test_id: str :ivar batch_inputs: :vartype batch_inputs: list[dict[str, any]] :ivar batch_data_input: :vartype batch_data_input: ~flow.models.BatchDataInput :ivar created_by: :vartype created_by: ~flow.models.SchemaContractsCreatedBy :ivar created_on: :vartype created_on: ~datetime.datetime :ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun", "PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun". :vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :vartype flow_type: str or ~flow.models.FlowType :ivar runtime_name: :vartype runtime_name: str :ivar aml_compute_name: :vartype aml_compute_name: str :ivar flow_run_logs: Dictionary of :code:`<string>`. :vartype flow_run_logs: dict[str, str] :ivar flow_test_mode: Possible values include: "Sync", "Async". :vartype flow_test_mode: str or ~flow.models.FlowTestMode :ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`. :vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo] :ivar working_directory: :vartype working_directory: str :ivar flow_dag_file_relative_path: :vartype flow_dag_file_relative_path: str :ivar flow_snapshot_id: :vartype flow_snapshot_id: str :ivar variant_run_to_evaluation_runs_id_mapping: Dictionary of <componentsΒ·1k1eaegΒ·schemasΒ·flowrunresultΒ·propertiesΒ·variantruntoevaluationrunsidmappingΒ·additionalproperties>. 
:vartype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]] """ _attribute_map = { 'flow_runs': {'key': 'flow_runs', 'type': '[object]'}, 'node_runs': {'key': 'node_runs', 'type': '[object]'}, 'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'}, 'flow_name': {'key': 'flowName', 'type': 'str'}, 'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'}, 'flow_run_id': {'key': 'flowRunId', 'type': 'str'}, 'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'}, 'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'}, 'flow_run_resource_id': {'key': 'flowRunResourceId', 'type': 'str'}, 'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'}, 'batch_inputs': {'key': 'batchInputs', 'type': '[{object}]'}, 'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'}, 'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'}, 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'flow_run_type': {'key': 'flowRunType', 'type': 'str'}, 'flow_type': {'key': 'flowType', 'type': 'str'}, 'runtime_name': {'key': 'runtimeName', 'type': 'str'}, 'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'}, 'flow_run_logs': {'key': 'flowRunLogs', 'type': '{str}'}, 'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'}, 'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'}, 'working_directory': {'key': 'workingDirectory', 'type': 'str'}, 'flow_dag_file_relative_path': {'key': 'flowDagFileRelativePath', 'type': 'str'}, 'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'}, 'variant_run_to_evaluation_runs_id_mapping': {'key': 'variantRunToEvaluationRunsIdMapping', 'type': '{[str]}'}, } def __init__( self, *, flow_runs: Optional[List[Any]] = None, node_runs: Optional[List[Any]] = None, error_response: Optional["ErrorResponse"] = None, flow_name: Optional[str] = None, flow_run_display_name: Optional[str] = None, flow_run_id: Optional[str] = None, flow_graph: Optional["FlowGraph"] = 
None, flow_graph_layout: Optional["FlowGraphLayout"] = None, flow_run_resource_id: Optional[str] = None, bulk_test_id: Optional[str] = None, batch_inputs: Optional[List[Dict[str, Any]]] = None, batch_data_input: Optional["BatchDataInput"] = None, created_by: Optional["SchemaContractsCreatedBy"] = None, created_on: Optional[datetime.datetime] = None, flow_run_type: Optional[Union[str, "FlowRunTypeEnum"]] = None, flow_type: Optional[Union[str, "FlowType"]] = None, runtime_name: Optional[str] = None, aml_compute_name: Optional[str] = None, flow_run_logs: Optional[Dict[str, str]] = None, flow_test_mode: Optional[Union[str, "FlowTestMode"]] = None, flow_test_infos: Optional[Dict[str, "FlowTestInfo"]] = None, working_directory: Optional[str] = None, flow_dag_file_relative_path: Optional[str] = None, flow_snapshot_id: Optional[str] = None, variant_run_to_evaluation_runs_id_mapping: Optional[Dict[str, List[str]]] = None, **kwargs ): """ :keyword flow_runs: :paramtype flow_runs: list[any] :keyword node_runs: :paramtype node_runs: list[any] :keyword error_response: The error response. 
:paramtype error_response: ~flow.models.ErrorResponse :keyword flow_name: :paramtype flow_name: str :keyword flow_run_display_name: :paramtype flow_run_display_name: str :keyword flow_run_id: :paramtype flow_run_id: str :keyword flow_graph: :paramtype flow_graph: ~flow.models.FlowGraph :keyword flow_graph_layout: :paramtype flow_graph_layout: ~flow.models.FlowGraphLayout :keyword flow_run_resource_id: :paramtype flow_run_resource_id: str :keyword bulk_test_id: :paramtype bulk_test_id: str :keyword batch_inputs: :paramtype batch_inputs: list[dict[str, any]] :keyword batch_data_input: :paramtype batch_data_input: ~flow.models.BatchDataInput :keyword created_by: :paramtype created_by: ~flow.models.SchemaContractsCreatedBy :keyword created_on: :paramtype created_on: ~datetime.datetime :keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun", "PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun". :paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :paramtype flow_type: str or ~flow.models.FlowType :keyword runtime_name: :paramtype runtime_name: str :keyword aml_compute_name: :paramtype aml_compute_name: str :keyword flow_run_logs: Dictionary of :code:`<string>`. :paramtype flow_run_logs: dict[str, str] :keyword flow_test_mode: Possible values include: "Sync", "Async". :paramtype flow_test_mode: str or ~flow.models.FlowTestMode :keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`. :paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo] :keyword working_directory: :paramtype working_directory: str :keyword flow_dag_file_relative_path: :paramtype flow_dag_file_relative_path: str :keyword flow_snapshot_id: :paramtype flow_snapshot_id: str :keyword variant_run_to_evaluation_runs_id_mapping: Dictionary of <componentsΒ·1k1eaegΒ·schemasΒ·flowrunresultΒ·propertiesΒ·variantruntoevaluationrunsidmappingΒ·additionalproperties>. 
:paramtype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]] """ super(FlowRunResult, self).__init__(**kwargs) self.flow_runs = flow_runs self.node_runs = node_runs self.error_response = error_response self.flow_name = flow_name self.flow_run_display_name = flow_run_display_name self.flow_run_id = flow_run_id self.flow_graph = flow_graph self.flow_graph_layout = flow_graph_layout self.flow_run_resource_id = flow_run_resource_id self.bulk_test_id = bulk_test_id self.batch_inputs = batch_inputs self.batch_data_input = batch_data_input self.created_by = created_by self.created_on = created_on self.flow_run_type = flow_run_type self.flow_type = flow_type self.runtime_name = runtime_name self.aml_compute_name = aml_compute_name self.flow_run_logs = flow_run_logs self.flow_test_mode = flow_test_mode self.flow_test_infos = flow_test_infos self.working_directory = working_directory self.flow_dag_file_relative_path = flow_dag_file_relative_path self.flow_snapshot_id = flow_snapshot_id self.variant_run_to_evaluation_runs_id_mapping = variant_run_to_evaluation_runs_id_mapping class FlowRunSettings(msrest.serialization.Model): """FlowRunSettings. :ivar flow_run_display_name: :vartype flow_run_display_name: str :ivar description: :vartype description: str :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar properties: This is a dictionary. :vartype properties: dict[str, str] :ivar run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest", "Eval", "PairwiseEval". :vartype run_mode: str or ~flow.models.FlowRunMode :ivar batch_inputs: :vartype batch_inputs: list[dict[str, any]] :ivar batch_data_input: :vartype batch_data_input: ~flow.models.BatchDataInput :ivar tuning_node_names: :vartype tuning_node_names: list[str] :ivar tuning_node_settings: This is a dictionary. 
    :vartype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting]
    :ivar baseline_variant_id:
    :vartype baseline_variant_id: str
    :ivar default_variant_id:
    :vartype default_variant_id: str
    :ivar variants: This is a dictionary.
    :vartype variants: dict[str, list[~flow.models.Node]]
    :ivar variants_tools:
    :vartype variants_tools: list[~flow.models.Tool]
    :ivar variants_codes: This is a dictionary.
    :vartype variants_codes: dict[str, str]
    :ivar node_name:
    :vartype node_name: str
    :ivar bulk_test_id:
    :vartype bulk_test_id: str
    :ivar evaluation_flow_run_settings: This is a dictionary.
    :vartype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings]
    :ivar inputs_mapping: This is a dictionary.
    :vartype inputs_mapping: dict[str, str]
    :ivar data_inputs: This is a dictionary.
    :vartype data_inputs: dict[str, str]
    :ivar bulk_test_flow_id:
    :vartype bulk_test_flow_id: str
    :ivar bulk_test_flow_run_ids:
    :vartype bulk_test_flow_run_ids: list[str]
    :ivar aml_compute_name:
    :vartype aml_compute_name: str
    :ivar runtime_name:
    :vartype runtime_name: str
    :ivar flow_run_output_directory:
    :vartype flow_run_output_directory: str
    """

    # Wire-format mapping consumed by msrest.
    # NOTE(review): 'batch_inputs' serializes to a snake_case JSON key here,
    # unlike FlowRunResult's camelCase 'batchInputs' — presumably mirrors the
    # swagger; confirm against the service contract before "fixing".
    _attribute_map = {
        'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'run_mode': {'key': 'runMode', 'type': 'str'},
        'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
        'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
        'tuning_node_names': {'key': 'tuningNodeNames', 'type': '[str]'},
        'tuning_node_settings': {'key': 'tuningNodeSettings', 'type': '{TuningNodeSetting}'},
        'baseline_variant_id': {'key': 'baselineVariantId', 'type': 'str'},
        'default_variant_id': {'key': 'defaultVariantId', 'type': 'str'},
        'variants': {'key': 'variants', 'type': '{[Node]}'},
        'variants_tools': {'key': 'variantsTools', 'type': '[Tool]'},
        'variants_codes': {'key': 'variantsCodes', 'type': '{str}'},
        'node_name': {'key': 'nodeName', 'type': 'str'},
        'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
        'evaluation_flow_run_settings': {'key': 'evaluationFlowRunSettings', 'type': '{EvaluationFlowRunSettings}'},
        'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
        'data_inputs': {'key': 'dataInputs', 'type': '{str}'},
        'bulk_test_flow_id': {'key': 'bulkTestFlowId', 'type': 'str'},
        'bulk_test_flow_run_ids': {'key': 'bulkTestFlowRunIds', 'type': '[str]'},
        'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
        'runtime_name': {'key': 'runtimeName', 'type': 'str'},
        'flow_run_output_directory': {'key': 'flowRunOutputDirectory', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        flow_run_display_name: Optional[str] = None,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        run_mode: Optional[Union[str, "FlowRunMode"]] = None,
        batch_inputs: Optional[List[Dict[str, Any]]] = None,
        batch_data_input: Optional["BatchDataInput"] = None,
        tuning_node_names: Optional[List[str]] = None,
        tuning_node_settings: Optional[Dict[str, "TuningNodeSetting"]] = None,
        baseline_variant_id: Optional[str] = None,
        default_variant_id: Optional[str] = None,
        variants: Optional[Dict[str, List["Node"]]] = None,
        variants_tools: Optional[List["Tool"]] = None,
        variants_codes: Optional[Dict[str, str]] = None,
        node_name: Optional[str] = None,
        bulk_test_id: Optional[str] = None,
        evaluation_flow_run_settings: Optional[Dict[str, "EvaluationFlowRunSettings"]] = None,
        inputs_mapping: Optional[Dict[str, str]] = None,
        data_inputs: Optional[Dict[str, str]] = None,
        bulk_test_flow_id: Optional[str] = None,
        bulk_test_flow_run_ids: Optional[List[str]] = None,
        aml_compute_name: Optional[str] = None,
        runtime_name: Optional[str] = None,
        flow_run_output_directory: Optional[str] = None,
        **kwargs
    ):
        """Initialize FlowRunSettings.

        Every keyword argument maps one-to-one onto the instance attribute of
        the same name; see the class docstring for types and allowed enum
        values. All fields are optional and default to ``None``.
        """
        super(FlowRunSettings, self).__init__(**kwargs)
        self.flow_run_display_name = flow_run_display_name
        self.description = description
        self.tags = tags
        self.properties = properties
        self.run_mode = run_mode
        self.batch_inputs = batch_inputs
        self.batch_data_input = batch_data_input
        self.tuning_node_names = tuning_node_names
        self.tuning_node_settings = tuning_node_settings
        self.baseline_variant_id = baseline_variant_id
        self.default_variant_id = default_variant_id
        self.variants = variants
        self.variants_tools = variants_tools
        self.variants_codes = variants_codes
        self.node_name = node_name
        self.bulk_test_id = bulk_test_id
        self.evaluation_flow_run_settings = evaluation_flow_run_settings
        self.inputs_mapping = inputs_mapping
        self.data_inputs = data_inputs
        self.bulk_test_flow_id = bulk_test_flow_id
        self.bulk_test_flow_run_ids = bulk_test_flow_run_ids
        self.aml_compute_name = aml_compute_name
        self.runtime_name = runtime_name
        self.flow_run_output_directory = flow_run_output_directory


# NOTE(review): AutoRest-generated model; edits are lost on regeneration.
class FlowRuntimeCapability(msrest.serialization.Model):
    """FlowRuntimeCapability.

    :ivar flow_features:
    :vartype flow_features: list[~flow.models.FlowFeature]
    """

    _attribute_map = {
        'flow_features': {'key': 'flowFeatures', 'type': '[FlowFeature]'},
    }

    def __init__(
        self,
        *,
        flow_features: Optional[List["FlowFeature"]] = None,
        **kwargs
    ):
        """
        :keyword flow_features:
        :paramtype flow_features: list[~flow.models.FlowFeature]
        """
        super(FlowRuntimeCapability, self).__init__(**kwargs)
        self.flow_features = flow_features


class FlowRuntimeDto(msrest.serialization.Model):
    """FlowRuntimeDto.

    :ivar runtime_name:
    :vartype runtime_name: str
    :ivar runtime_description:
    :vartype runtime_description: str
    :ivar runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
     "TrainingSession".
    :vartype runtime_type: str or ~flow.models.RuntimeType
    :ivar environment:
    :vartype environment: str
    :ivar status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting",
     "Stopping".
    :vartype status: str or ~flow.models.RuntimeStatusEnum
    :ivar status_message:
    :vartype status_message: str
    :ivar error: The error response.
    :vartype error: ~flow.models.ErrorResponse
    :ivar from_existing_endpoint:
    :vartype from_existing_endpoint: bool
    :ivar endpoint_name:
    :vartype endpoint_name: str
    :ivar from_existing_deployment:
    :vartype from_existing_deployment: bool
    :ivar deployment_name:
    :vartype deployment_name: str
    :ivar identity:
    :vartype identity: ~flow.models.ManagedServiceIdentity
    :ivar instance_type:
    :vartype instance_type: str
    :ivar instance_count:
    :vartype instance_count: int
    :ivar compute_instance_name:
    :vartype compute_instance_name: str
    :ivar docker_image:
    :vartype docker_image: str
    :ivar published_port:
    :vartype published_port: int
    :ivar target_port:
    :vartype target_port: int
    :ivar from_existing_custom_app:
    :vartype from_existing_custom_app: bool
    :ivar custom_app_name:
    :vartype custom_app_name: str
    :ivar assigned_to:
    :vartype assigned_to: ~flow.models.AssignedUser
    :ivar endpoint_url:
    :vartype endpoint_url: str
    :ivar created_on:
    :vartype created_on: ~datetime.datetime
    :ivar modified_on:
    :vartype modified_on: ~datetime.datetime
    :ivar owner:
    :vartype owner: ~flow.models.SchemaContractsCreatedBy
    """

    # Wire-format mapping consumed by msrest.
    _attribute_map = {
        'runtime_name': {'key': 'runtimeName', 'type': 'str'},
        'runtime_description': {'key': 'runtimeDescription', 'type': 'str'},
        'runtime_type': {'key': 'runtimeType', 'type': 'str'},
        'environment': {'key': 'environment', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'status_message': {'key': 'statusMessage', 'type': 'str'},
        'error': {'key': 'error', 'type': 'ErrorResponse'},
        'from_existing_endpoint': {'key': 'fromExistingEndpoint', 'type': 'bool'},
        'endpoint_name': {'key': 'endpointName', 'type': 'str'},
        'from_existing_deployment': {'key': 'fromExistingDeployment', 'type': 'bool'},
        'deployment_name': {'key': 'deploymentName', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
        'compute_instance_name': {'key': 'computeInstanceName', 'type': 'str'},
        'docker_image': {'key': 'dockerImage', 'type': 'str'},
        'published_port': {'key': 'publishedPort', 'type': 'int'},
        'target_port': {'key': 'targetPort', 'type': 'int'},
        'from_existing_custom_app': {'key': 'fromExistingCustomApp', 'type': 'bool'},
        'custom_app_name': {'key': 'customAppName', 'type': 'str'},
        'assigned_to': {'key': 'assignedTo', 'type': 'AssignedUser'},
        'endpoint_url': {'key': 'endpointUrl', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
    }

    def __init__(
        self,
        *,
        runtime_name: Optional[str] = None,
        runtime_description: Optional[str] = None,
        runtime_type: Optional[Union[str, "RuntimeType"]] = None,
        environment: Optional[str] = None,
        status: Optional[Union[str, "RuntimeStatusEnum"]] = None,
        status_message: Optional[str] = None,
        error: Optional["ErrorResponse"] = None,
        from_existing_endpoint: Optional[bool] = None,
        endpoint_name: Optional[str] = None,
        from_existing_deployment: Optional[bool] = None,
        deployment_name: Optional[str] = None,
        identity: Optional["ManagedServiceIdentity"] = None,
        instance_type: Optional[str] = None,
        instance_count: Optional[int] = None,
        compute_instance_name: Optional[str] = None,
        docker_image: Optional[str] = None,
        published_port: Optional[int] = None,
        target_port: Optional[int] = None,
        from_existing_custom_app: Optional[bool] = None,
        custom_app_name: Optional[str] = None,
        assigned_to: Optional["AssignedUser"] = None,
        endpoint_url: Optional[str] = None,
        created_on: Optional[datetime.datetime] = None,
        modified_on: Optional[datetime.datetime] = None,
        owner: Optional["SchemaContractsCreatedBy"] = None,
        **kwargs
    ):
        """Initialize FlowRuntimeDto.

        Every keyword argument maps one-to-one onto the instance attribute of
        the same name; see the class docstring for types and allowed enum
        values. All fields are optional and default to ``None``.
        """
        super(FlowRuntimeDto, self).__init__(**kwargs)
        self.runtime_name = runtime_name
        self.runtime_description = runtime_description
        self.runtime_type = runtime_type
        self.environment = environment
        self.status = status
        self.status_message = status_message
        self.error = error
        self.from_existing_endpoint = from_existing_endpoint
        self.endpoint_name = endpoint_name
        self.from_existing_deployment = from_existing_deployment
        self.deployment_name = deployment_name
        self.identity = identity
        self.instance_type = instance_type
        self.instance_count = instance_count
        self.compute_instance_name = compute_instance_name
        self.docker_image = docker_image
        self.published_port = published_port
        self.target_port = target_port
        self.from_existing_custom_app = from_existing_custom_app
        self.custom_app_name = custom_app_name
        self.assigned_to = assigned_to
        self.endpoint_url = endpoint_url
        self.created_on = created_on
        self.modified_on = modified_on
        self.owner = owner


class FlowSampleDto(msrest.serialization.Model):
    """FlowSampleDto.

    :ivar sample_resource_id:
    :vartype sample_resource_id: str
    :ivar section: Possible values include: "Gallery", "Template".
    :vartype section: str or ~flow.models.Section
    :ivar index_number:
    :vartype index_number: int
    :ivar flow_name:
    :vartype flow_name: str
    :ivar description:
    :vartype description: str
    :ivar details:
    :vartype details: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar flow:
    :vartype flow: ~flow.models.Flow
    :ivar flow_definition_file_path:
    :vartype flow_definition_file_path: str
    :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType :ivar flow_run_settings: :vartype flow_run_settings: ~flow.models.FlowRunSettings :ivar is_archived: :vartype is_archived: bool :ivar vm_size: :vartype vm_size: str :ivar max_idle_time_seconds: :vartype max_idle_time_seconds: long :ivar identity: :vartype identity: str """ _attribute_map = { 'sample_resource_id': {'key': 'sampleResourceId', 'type': 'str'}, 'section': {'key': 'section', 'type': 'str'}, 'index_number': {'key': 'indexNumber', 'type': 'int'}, 'flow_name': {'key': 'flowName', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'details': {'key': 'details', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'flow': {'key': 'flow', 'type': 'Flow'}, 'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'}, 'flow_type': {'key': 'flowType', 'type': 'str'}, 'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'}, 'is_archived': {'key': 'isArchived', 'type': 'bool'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'}, 'identity': {'key': 'identity', 'type': 'str'}, } def __init__( self, *, sample_resource_id: Optional[str] = None, section: Optional[Union[str, "Section"]] = None, index_number: Optional[int] = None, flow_name: Optional[str] = None, description: Optional[str] = None, details: Optional[str] = None, tags: Optional[Dict[str, str]] = None, flow: Optional["Flow"] = None, flow_definition_file_path: Optional[str] = None, flow_type: Optional[Union[str, "FlowType"]] = None, flow_run_settings: Optional["FlowRunSettings"] = None, is_archived: Optional[bool] = None, vm_size: Optional[str] = None, max_idle_time_seconds: Optional[int] = None, identity: Optional[str] = None, **kwargs ): """ :keyword sample_resource_id: :paramtype sample_resource_id: str :keyword section: Possible values include: "Gallery", "Template". 
:paramtype section: str or ~flow.models.Section :keyword index_number: :paramtype index_number: int :keyword flow_name: :paramtype flow_name: str :keyword description: :paramtype description: str :keyword details: :paramtype details: str :keyword tags: A set of tags. Dictionary of :code:`<string>`. :paramtype tags: dict[str, str] :keyword flow: :paramtype flow: ~flow.models.Flow :keyword flow_definition_file_path: :paramtype flow_definition_file_path: str :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :paramtype flow_type: str or ~flow.models.FlowType :keyword flow_run_settings: :paramtype flow_run_settings: ~flow.models.FlowRunSettings :keyword is_archived: :paramtype is_archived: bool :keyword vm_size: :paramtype vm_size: str :keyword max_idle_time_seconds: :paramtype max_idle_time_seconds: long :keyword identity: :paramtype identity: str """ super(FlowSampleDto, self).__init__(**kwargs) self.sample_resource_id = sample_resource_id self.section = section self.index_number = index_number self.flow_name = flow_name self.description = description self.details = details self.tags = tags self.flow = flow self.flow_definition_file_path = flow_definition_file_path self.flow_type = flow_type self.flow_run_settings = flow_run_settings self.is_archived = is_archived self.vm_size = vm_size self.max_idle_time_seconds = max_idle_time_seconds self.identity = identity class FlowSessionDto(msrest.serialization.Model): """FlowSessionDto. 
:ivar session_id: :vartype session_id: str :ivar base_image: :vartype base_image: str :ivar packages: :vartype packages: list[str] :ivar vm_size: :vartype vm_size: str :ivar max_idle_time_seconds: :vartype max_idle_time_seconds: long :ivar flow_features: :vartype flow_features: list[~flow.models.FlowFeature] :ivar runtime_name: :vartype runtime_name: str :ivar runtime_description: :vartype runtime_description: str :ivar runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance", "TrainingSession". :vartype runtime_type: str or ~flow.models.RuntimeType :ivar environment: :vartype environment: str :ivar status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting", "Stopping". :vartype status: str or ~flow.models.RuntimeStatusEnum :ivar status_message: :vartype status_message: str :ivar error: The error response. :vartype error: ~flow.models.ErrorResponse :ivar from_existing_endpoint: :vartype from_existing_endpoint: bool :ivar endpoint_name: :vartype endpoint_name: str :ivar from_existing_deployment: :vartype from_existing_deployment: bool :ivar deployment_name: :vartype deployment_name: str :ivar identity: :vartype identity: ~flow.models.ManagedServiceIdentity :ivar instance_type: :vartype instance_type: str :ivar instance_count: :vartype instance_count: int :ivar compute_instance_name: :vartype compute_instance_name: str :ivar docker_image: :vartype docker_image: str :ivar published_port: :vartype published_port: int :ivar target_port: :vartype target_port: int :ivar from_existing_custom_app: :vartype from_existing_custom_app: bool :ivar custom_app_name: :vartype custom_app_name: str :ivar assigned_to: :vartype assigned_to: ~flow.models.AssignedUser :ivar endpoint_url: :vartype endpoint_url: str :ivar created_on: :vartype created_on: ~datetime.datetime :ivar modified_on: :vartype modified_on: ~datetime.datetime :ivar owner: :vartype owner: ~flow.models.SchemaContractsCreatedBy """ _attribute_map = { 'session_id': {'key': 
'sessionId', 'type': 'str'}, 'base_image': {'key': 'baseImage', 'type': 'str'}, 'packages': {'key': 'packages', 'type': '[str]'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'}, 'flow_features': {'key': 'flowFeatures', 'type': '[FlowFeature]'}, 'runtime_name': {'key': 'runtimeName', 'type': 'str'}, 'runtime_description': {'key': 'runtimeDescription', 'type': 'str'}, 'runtime_type': {'key': 'runtimeType', 'type': 'str'}, 'environment': {'key': 'environment', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'status_message': {'key': 'statusMessage', 'type': 'str'}, 'error': {'key': 'error', 'type': 'ErrorResponse'}, 'from_existing_endpoint': {'key': 'fromExistingEndpoint', 'type': 'bool'}, 'endpoint_name': {'key': 'endpointName', 'type': 'str'}, 'from_existing_deployment': {'key': 'fromExistingDeployment', 'type': 'bool'}, 'deployment_name': {'key': 'deploymentName', 'type': 'str'}, 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, 'instance_type': {'key': 'instanceType', 'type': 'str'}, 'instance_count': {'key': 'instanceCount', 'type': 'int'}, 'compute_instance_name': {'key': 'computeInstanceName', 'type': 'str'}, 'docker_image': {'key': 'dockerImage', 'type': 'str'}, 'published_port': {'key': 'publishedPort', 'type': 'int'}, 'target_port': {'key': 'targetPort', 'type': 'int'}, 'from_existing_custom_app': {'key': 'fromExistingCustomApp', 'type': 'bool'}, 'custom_app_name': {'key': 'customAppName', 'type': 'str'}, 'assigned_to': {'key': 'assignedTo', 'type': 'AssignedUser'}, 'endpoint_url': {'key': 'endpointUrl', 'type': 'str'}, 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'}, } def __init__( self, *, session_id: Optional[str] = None, base_image: Optional[str] = None, packages: Optional[List[str]] = None, vm_size: Optional[str] = None, 
max_idle_time_seconds: Optional[int] = None, flow_features: Optional[List["FlowFeature"]] = None, runtime_name: Optional[str] = None, runtime_description: Optional[str] = None, runtime_type: Optional[Union[str, "RuntimeType"]] = None, environment: Optional[str] = None, status: Optional[Union[str, "RuntimeStatusEnum"]] = None, status_message: Optional[str] = None, error: Optional["ErrorResponse"] = None, from_existing_endpoint: Optional[bool] = None, endpoint_name: Optional[str] = None, from_existing_deployment: Optional[bool] = None, deployment_name: Optional[str] = None, identity: Optional["ManagedServiceIdentity"] = None, instance_type: Optional[str] = None, instance_count: Optional[int] = None, compute_instance_name: Optional[str] = None, docker_image: Optional[str] = None, published_port: Optional[int] = None, target_port: Optional[int] = None, from_existing_custom_app: Optional[bool] = None, custom_app_name: Optional[str] = None, assigned_to: Optional["AssignedUser"] = None, endpoint_url: Optional[str] = None, created_on: Optional[datetime.datetime] = None, modified_on: Optional[datetime.datetime] = None, owner: Optional["SchemaContractsCreatedBy"] = None, **kwargs ): """ :keyword session_id: :paramtype session_id: str :keyword base_image: :paramtype base_image: str :keyword packages: :paramtype packages: list[str] :keyword vm_size: :paramtype vm_size: str :keyword max_idle_time_seconds: :paramtype max_idle_time_seconds: long :keyword flow_features: :paramtype flow_features: list[~flow.models.FlowFeature] :keyword runtime_name: :paramtype runtime_name: str :keyword runtime_description: :paramtype runtime_description: str :keyword runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance", "TrainingSession". :paramtype runtime_type: str or ~flow.models.RuntimeType :keyword environment: :paramtype environment: str :keyword status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting", "Stopping". 
:paramtype status: str or ~flow.models.RuntimeStatusEnum :keyword status_message: :paramtype status_message: str :keyword error: The error response. :paramtype error: ~flow.models.ErrorResponse :keyword from_existing_endpoint: :paramtype from_existing_endpoint: bool :keyword endpoint_name: :paramtype endpoint_name: str :keyword from_existing_deployment: :paramtype from_existing_deployment: bool :keyword deployment_name: :paramtype deployment_name: str :keyword identity: :paramtype identity: ~flow.models.ManagedServiceIdentity :keyword instance_type: :paramtype instance_type: str :keyword instance_count: :paramtype instance_count: int :keyword compute_instance_name: :paramtype compute_instance_name: str :keyword docker_image: :paramtype docker_image: str :keyword published_port: :paramtype published_port: int :keyword target_port: :paramtype target_port: int :keyword from_existing_custom_app: :paramtype from_existing_custom_app: bool :keyword custom_app_name: :paramtype custom_app_name: str :keyword assigned_to: :paramtype assigned_to: ~flow.models.AssignedUser :keyword endpoint_url: :paramtype endpoint_url: str :keyword created_on: :paramtype created_on: ~datetime.datetime :keyword modified_on: :paramtype modified_on: ~datetime.datetime :keyword owner: :paramtype owner: ~flow.models.SchemaContractsCreatedBy """ super(FlowSessionDto, self).__init__(**kwargs) self.session_id = session_id self.base_image = base_image self.packages = packages self.vm_size = vm_size self.max_idle_time_seconds = max_idle_time_seconds self.flow_features = flow_features self.runtime_name = runtime_name self.runtime_description = runtime_description self.runtime_type = runtime_type self.environment = environment self.status = status self.status_message = status_message self.error = error self.from_existing_endpoint = from_existing_endpoint self.endpoint_name = endpoint_name self.from_existing_deployment = from_existing_deployment self.deployment_name = deployment_name self.identity = 
identity
        self.instance_type = instance_type
        self.instance_count = instance_count
        self.compute_instance_name = compute_instance_name
        self.docker_image = docker_image
        self.published_port = published_port
        self.target_port = target_port
        self.from_existing_custom_app = from_existing_custom_app
        self.custom_app_name = custom_app_name
        self.assigned_to = assigned_to
        self.endpoint_url = endpoint_url
        self.created_on = created_on
        self.modified_on = modified_on
        self.owner = owner


class FlowSnapshot(msrest.serialization.Model):
    """FlowSnapshot.

    :ivar inputs: This is a dictionary.
    :vartype inputs: dict[str, ~flow.models.FlowInputDefinition]
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, ~flow.models.FlowOutputDefinition]
    :ivar nodes:
    :vartype nodes: list[~flow.models.FlowNode]
    :ivar node_variants: This is a dictionary.
    :vartype node_variants: dict[str, ~flow.models.FlowNodeVariant]
    :ivar environment:
    :vartype environment: ~flow.models.FlowEnvironment
    :ivar environment_variables: This is a dictionary.
    :vartype environment_variables: dict[str, any]
    :ivar language: Possible values include: "Python", "CSharp".
    :vartype language: str or ~flow.models.FlowLanguage
    """

    # Attribute name -> REST wire 'key' and msrest 'type' string used for (de)serialization.
    _attribute_map = {
        'inputs': {'key': 'inputs', 'type': '{FlowInputDefinition}'},
        'outputs': {'key': 'outputs', 'type': '{FlowOutputDefinition}'},
        'nodes': {'key': 'nodes', 'type': '[FlowNode]'},
        'node_variants': {'key': 'node_variants', 'type': '{FlowNodeVariant}'},
        'environment': {'key': 'environment', 'type': 'FlowEnvironment'},
        'environment_variables': {'key': 'environment_variables', 'type': '{object}'},
        'language': {'key': 'language', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        inputs: Optional[Dict[str, "FlowInputDefinition"]] = None,
        outputs: Optional[Dict[str, "FlowOutputDefinition"]] = None,
        nodes: Optional[List["FlowNode"]] = None,
        node_variants: Optional[Dict[str, "FlowNodeVariant"]] = None,
        environment: Optional["FlowEnvironment"] = None,
        environment_variables: Optional[Dict[str, Any]] = None,
        language: Optional[Union[str, "FlowLanguage"]] = None,
        **kwargs
    ):
        """
        :keyword inputs: This is a dictionary.
        :paramtype inputs: dict[str, ~flow.models.FlowInputDefinition]
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, ~flow.models.FlowOutputDefinition]
        :keyword nodes:
        :paramtype nodes: list[~flow.models.FlowNode]
        :keyword node_variants: This is a dictionary.
        :paramtype node_variants: dict[str, ~flow.models.FlowNodeVariant]
        :keyword environment:
        :paramtype environment: ~flow.models.FlowEnvironment
        :keyword environment_variables: This is a dictionary.
        :paramtype environment_variables: dict[str, any]
        :keyword language: Possible values include: "Python", "CSharp".
        :paramtype language: str or ~flow.models.FlowLanguage
        """
        super(FlowSnapshot, self).__init__(**kwargs)
        self.inputs = inputs
        self.outputs = outputs
        self.nodes = nodes
        self.node_variants = node_variants
        self.environment = environment
        self.environment_variables = environment_variables
        self.language = language


class FlowSubmitRunSettings(msrest.serialization.Model):
    """FlowSubmitRunSettings.

    :ivar node_inputs: This is a dictionary.
    :vartype node_inputs: dict[str, any]
    :ivar flow_run_display_name:
    :vartype flow_run_display_name: str
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest", "Eval",
     "PairwiseEval".
    :vartype run_mode: str or ~flow.models.FlowRunMode
    :ivar batch_inputs:
    :vartype batch_inputs: list[dict[str, any]]
    :ivar batch_data_input:
    :vartype batch_data_input: ~flow.models.BatchDataInput
    :ivar tuning_node_names:
    :vartype tuning_node_names: list[str]
    :ivar tuning_node_settings: This is a dictionary.
    :vartype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting]
    :ivar baseline_variant_id:
    :vartype baseline_variant_id: str
    :ivar default_variant_id:
    :vartype default_variant_id: str
    :ivar variants: This is a dictionary.
    :vartype variants: dict[str, list[~flow.models.Node]]
    :ivar variants_tools:
    :vartype variants_tools: list[~flow.models.Tool]
    :ivar variants_codes: This is a dictionary.
    :vartype variants_codes: dict[str, str]
    :ivar node_name:
    :vartype node_name: str
    :ivar bulk_test_id:
    :vartype bulk_test_id: str
    :ivar evaluation_flow_run_settings: This is a dictionary.
    :vartype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings]
    :ivar inputs_mapping: This is a dictionary.
    :vartype inputs_mapping: dict[str, str]
    :ivar data_inputs: This is a dictionary.
    :vartype data_inputs: dict[str, str]
    :ivar bulk_test_flow_id:
    :vartype bulk_test_flow_id: str
    :ivar bulk_test_flow_run_ids:
    :vartype bulk_test_flow_run_ids: list[str]
    :ivar aml_compute_name:
    :vartype aml_compute_name: str
    :ivar runtime_name:
    :vartype runtime_name: str
    :ivar flow_run_output_directory:
    :vartype flow_run_output_directory: str
    """

    # Attribute name -> REST wire 'key' and msrest 'type' string used for (de)serialization.
    _attribute_map = {
        'node_inputs': {'key': 'nodeInputs', 'type': '{object}'},
        'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'run_mode': {'key': 'runMode', 'type': 'str'},
        'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
        'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
        'tuning_node_names': {'key': 'tuningNodeNames', 'type': '[str]'},
        'tuning_node_settings': {'key': 'tuningNodeSettings', 'type': '{TuningNodeSetting}'},
        'baseline_variant_id': {'key': 'baselineVariantId', 'type': 'str'},
        'default_variant_id': {'key': 'defaultVariantId', 'type': 'str'},
        'variants': {'key': 'variants', 'type': '{[Node]}'},
        'variants_tools': {'key': 'variantsTools', 'type': '[Tool]'},
        'variants_codes': {'key': 'variantsCodes', 'type': '{str}'},
        'node_name': {'key': 'nodeName', 'type': 'str'},
        'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
        'evaluation_flow_run_settings': {'key': 'evaluationFlowRunSettings', 'type': '{EvaluationFlowRunSettings}'},
        'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
        'data_inputs': {'key': 'dataInputs', 'type': '{str}'},
        'bulk_test_flow_id': {'key': 'bulkTestFlowId', 'type': 'str'},
        'bulk_test_flow_run_ids': {'key': 'bulkTestFlowRunIds', 'type': '[str]'},
        'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
        'runtime_name': {'key': 'runtimeName', 'type': 'str'},
        'flow_run_output_directory': {'key': 'flowRunOutputDirectory', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        node_inputs: Optional[Dict[str, Any]] = None,
        flow_run_display_name: Optional[str] = None,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        run_mode: Optional[Union[str, "FlowRunMode"]] = None,
        batch_inputs: Optional[List[Dict[str, Any]]] = None,
        batch_data_input: Optional["BatchDataInput"] = None,
        tuning_node_names: Optional[List[str]] = None,
        tuning_node_settings: Optional[Dict[str, "TuningNodeSetting"]] = None,
        baseline_variant_id: Optional[str] = None,
        default_variant_id: Optional[str] = None,
        variants: Optional[Dict[str, List["Node"]]] = None,
        variants_tools: Optional[List["Tool"]] = None,
        variants_codes: Optional[Dict[str, str]] = None,
        node_name: Optional[str] = None,
        bulk_test_id: Optional[str] = None,
        evaluation_flow_run_settings: Optional[Dict[str, "EvaluationFlowRunSettings"]] = None,
        inputs_mapping: Optional[Dict[str, str]] = None,
        data_inputs: Optional[Dict[str, str]] = None,
        bulk_test_flow_id: Optional[str] = None,
        bulk_test_flow_run_ids: Optional[List[str]] = None,
        aml_compute_name: Optional[str] = None,
        runtime_name: Optional[str] = None,
        flow_run_output_directory: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword node_inputs: This is a dictionary.
        :paramtype node_inputs: dict[str, any]
        :keyword flow_run_display_name:
        :paramtype flow_run_display_name: str
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest",
         "Eval", "PairwiseEval".
        :paramtype run_mode: str or ~flow.models.FlowRunMode
        :keyword batch_inputs:
        :paramtype batch_inputs: list[dict[str, any]]
        :keyword batch_data_input:
        :paramtype batch_data_input: ~flow.models.BatchDataInput
        :keyword tuning_node_names:
        :paramtype tuning_node_names: list[str]
        :keyword tuning_node_settings: This is a dictionary.
        :paramtype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting]
        :keyword baseline_variant_id:
        :paramtype baseline_variant_id: str
        :keyword default_variant_id:
        :paramtype default_variant_id: str
        :keyword variants: This is a dictionary.
        :paramtype variants: dict[str, list[~flow.models.Node]]
        :keyword variants_tools:
        :paramtype variants_tools: list[~flow.models.Tool]
        :keyword variants_codes: This is a dictionary.
        :paramtype variants_codes: dict[str, str]
        :keyword node_name:
        :paramtype node_name: str
        :keyword bulk_test_id:
        :paramtype bulk_test_id: str
        :keyword evaluation_flow_run_settings: This is a dictionary.
        :paramtype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings]
        :keyword inputs_mapping: This is a dictionary.
        :paramtype inputs_mapping: dict[str, str]
        :keyword data_inputs: This is a dictionary.
        :paramtype data_inputs: dict[str, str]
        :keyword bulk_test_flow_id:
        :paramtype bulk_test_flow_id: str
        :keyword bulk_test_flow_run_ids:
        :paramtype bulk_test_flow_run_ids: list[str]
        :keyword aml_compute_name:
        :paramtype aml_compute_name: str
        :keyword runtime_name:
        :paramtype runtime_name: str
        :keyword flow_run_output_directory:
        :paramtype flow_run_output_directory: str
        """
        super(FlowSubmitRunSettings, self).__init__(**kwargs)
        self.node_inputs = node_inputs
        self.flow_run_display_name = flow_run_display_name
        self.description = description
        self.tags = tags
        self.properties = properties
        self.run_mode = run_mode
        self.batch_inputs = batch_inputs
        self.batch_data_input = batch_data_input
        self.tuning_node_names = tuning_node_names
        self.tuning_node_settings = tuning_node_settings
        self.baseline_variant_id = baseline_variant_id
        self.default_variant_id = default_variant_id
        self.variants = variants
        self.variants_tools = variants_tools
        self.variants_codes = variants_codes
        self.node_name = node_name
        self.bulk_test_id = bulk_test_id
        self.evaluation_flow_run_settings = evaluation_flow_run_settings
        self.inputs_mapping = inputs_mapping
        self.data_inputs = data_inputs
        self.bulk_test_flow_id = bulk_test_flow_id
        self.bulk_test_flow_run_ids = bulk_test_flow_run_ids
        self.aml_compute_name = aml_compute_name
        self.runtime_name = runtime_name
        self.flow_run_output_directory = flow_run_output_directory


class FlowTestInfo(msrest.serialization.Model):
    """FlowTestInfo.
    :ivar flow_run_id:
    :vartype flow_run_id: str
    :ivar flow_test_storage_setting:
    :vartype flow_test_storage_setting: ~flow.models.FlowTestStorageSetting
    """

    _attribute_map = {
        'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
        'flow_test_storage_setting': {'key': 'flowTestStorageSetting', 'type': 'FlowTestStorageSetting'},
    }

    def __init__(
        self,
        *,
        flow_run_id: Optional[str] = None,
        flow_test_storage_setting: Optional["FlowTestStorageSetting"] = None,
        **kwargs
    ):
        """
        :keyword flow_run_id:
        :paramtype flow_run_id: str
        :keyword flow_test_storage_setting:
        :paramtype flow_test_storage_setting: ~flow.models.FlowTestStorageSetting
        """
        super(FlowTestInfo, self).__init__(**kwargs)
        self.flow_run_id = flow_run_id
        self.flow_test_storage_setting = flow_test_storage_setting


class FlowTestStorageSetting(msrest.serialization.Model):
    """FlowTestStorageSetting.

    :ivar storage_account_name:
    :vartype storage_account_name: str
    :ivar blob_container_name:
    :vartype blob_container_name: str
    :ivar flow_artifacts_root_path:
    :vartype flow_artifacts_root_path: str
    :ivar output_datastore_name:
    :vartype output_datastore_name: str
    """

    # Attribute name -> REST wire 'key' and msrest 'type' string used for (de)serialization.
    _attribute_map = {
        'storage_account_name': {'key': 'storageAccountName', 'type': 'str'},
        'blob_container_name': {'key': 'blobContainerName', 'type': 'str'},
        'flow_artifacts_root_path': {'key': 'flowArtifactsRootPath', 'type': 'str'},
        'output_datastore_name': {'key': 'outputDatastoreName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        storage_account_name: Optional[str] = None,
        blob_container_name: Optional[str] = None,
        flow_artifacts_root_path: Optional[str] = None,
        output_datastore_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword storage_account_name:
        :paramtype storage_account_name: str
        :keyword blob_container_name:
        :paramtype blob_container_name: str
        :keyword flow_artifacts_root_path:
        :paramtype flow_artifacts_root_path: str
        :keyword output_datastore_name:
        :paramtype output_datastore_name: str
        """
        super(FlowTestStorageSetting, self).__init__(**kwargs)
        self.storage_account_name = storage_account_name
        self.blob_container_name = blob_container_name
        self.flow_artifacts_root_path = flow_artifacts_root_path
        self.output_datastore_name = output_datastore_name


class FlowToolsDto(msrest.serialization.Model):
    """FlowToolsDto.

    :ivar package: This is a dictionary.
    :vartype package: dict[str, ~flow.models.Tool]
    :ivar code: This is a dictionary.
    :vartype code: dict[str, ~flow.models.Tool]
    :ivar errors: This is a dictionary.
    :vartype errors: dict[str, ~flow.models.ErrorResponse]
    """

    # Attribute name -> REST wire 'key' and msrest 'type' string used for (de)serialization.
    _attribute_map = {
        'package': {'key': 'package', 'type': '{Tool}'},
        'code': {'key': 'code', 'type': '{Tool}'},
        'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
    }

    def __init__(
        self,
        *,
        package: Optional[Dict[str, "Tool"]] = None,
        code: Optional[Dict[str, "Tool"]] = None,
        errors: Optional[Dict[str, "ErrorResponse"]] = None,
        **kwargs
    ):
        """
        :keyword package: This is a dictionary.
        :paramtype package: dict[str, ~flow.models.Tool]
        :keyword code: This is a dictionary.
        :paramtype code: dict[str, ~flow.models.Tool]
        :keyword errors: This is a dictionary.
        :paramtype errors: dict[str, ~flow.models.ErrorResponse]
        """
        super(FlowToolsDto, self).__init__(**kwargs)
        self.package = package
        self.code = code
        self.errors = errors


class FlowToolSettingParameter(msrest.serialization.Model):
    """FlowToolSettingParameter.
    :ivar type:
    :vartype type: list[str or ~flow.models.ValueType]
    :ivar default:
    :vartype default: str
    :ivar advanced:
    :vartype advanced: bool
    :ivar enum:
    :vartype enum: list[any]
    :ivar model_list:
    :vartype model_list: list[str]
    :ivar text_box_size:
    :vartype text_box_size: int
    :ivar capabilities:
    :vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
    :ivar allow_manual_entry:
    :vartype allow_manual_entry: bool
    """

    # Attribute name -> REST wire 'key' and msrest 'type' string used for (de)serialization.
    _attribute_map = {
        'type': {'key': 'type', 'type': '[str]'},
        'default': {'key': 'default', 'type': 'str'},
        'advanced': {'key': 'advanced', 'type': 'bool'},
        'enum': {'key': 'enum', 'type': '[object]'},
        'model_list': {'key': 'model_list', 'type': '[str]'},
        'text_box_size': {'key': 'text_box_size', 'type': 'int'},
        'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
        'allow_manual_entry': {'key': 'allow_manual_entry', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        type: Optional[List[Union[str, "ValueType"]]] = None,
        default: Optional[str] = None,
        advanced: Optional[bool] = None,
        enum: Optional[List[Any]] = None,
        model_list: Optional[List[str]] = None,
        text_box_size: Optional[int] = None,
        capabilities: Optional["AzureOpenAIModelCapabilities"] = None,
        allow_manual_entry: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword type:
        :paramtype type: list[str or ~flow.models.ValueType]
        :keyword default:
        :paramtype default: str
        :keyword advanced:
        :paramtype advanced: bool
        :keyword enum:
        :paramtype enum: list[any]
        :keyword model_list:
        :paramtype model_list: list[str]
        :keyword text_box_size:
        :paramtype text_box_size: int
        :keyword capabilities:
        :paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
        :keyword allow_manual_entry:
        :paramtype allow_manual_entry: bool
        """
        super(FlowToolSettingParameter, self).__init__(**kwargs)
        self.type = type
        self.default = default
        self.advanced = advanced
        self.enum = enum
        self.model_list = model_list
        self.text_box_size = text_box_size
        self.capabilities = capabilities
        self.allow_manual_entry = allow_manual_entry


class FlowVariantNode(msrest.serialization.Model):
    """FlowVariantNode.

    :ivar node:
    :vartype node: ~flow.models.FlowNode
    :ivar description:
    :vartype description: str
    """

    # Attribute name -> REST wire 'key' and msrest 'type' string used for (de)serialization.
    _attribute_map = {
        'node': {'key': 'node', 'type': 'FlowNode'},
        'description': {'key': 'description', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        node: Optional["FlowNode"] = None,
        description: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword node:
        :paramtype node: ~flow.models.FlowNode
        :keyword description:
        :paramtype description: str
        """
        super(FlowVariantNode, self).__init__(**kwargs)
        self.node = node
        self.description = description


class ForecastHorizon(msrest.serialization.Model):
    """ForecastHorizon.

    :ivar mode: Possible values include: "Auto", "Custom".
    :vartype mode: str or ~flow.models.ForecastHorizonMode
    :ivar value:
    :vartype value: int
    """

    # Attribute name -> REST wire 'key' and msrest 'type' string used for (de)serialization.
    _attribute_map = {
        'mode': {'key': 'mode', 'type': 'str'},
        'value': {'key': 'value', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        mode: Optional[Union[str, "ForecastHorizonMode"]] = None,
        value: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword mode: Possible values include: "Auto", "Custom".
        :paramtype mode: str or ~flow.models.ForecastHorizonMode
        :keyword value:
        :paramtype value: int
        """
        super(ForecastHorizon, self).__init__(**kwargs)
        self.mode = mode
        self.value = value


class ForecastingSettings(msrest.serialization.Model):
    """ForecastingSettings.
    :ivar country_or_region_for_holidays:
    :vartype country_or_region_for_holidays: str
    :ivar time_column_name:
    :vartype time_column_name: str
    :ivar target_lags:
    :vartype target_lags: ~flow.models.TargetLags
    :ivar target_rolling_window_size:
    :vartype target_rolling_window_size: ~flow.models.TargetRollingWindowSize
    :ivar forecast_horizon:
    :vartype forecast_horizon: ~flow.models.ForecastHorizon
    :ivar time_series_id_column_names:
    :vartype time_series_id_column_names: list[str]
    :ivar frequency:
    :vartype frequency: str
    :ivar feature_lags:
    :vartype feature_lags: str
    :ivar seasonality:
    :vartype seasonality: ~flow.models.Seasonality
    :ivar short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
    :vartype short_series_handling_config: str or ~flow.models.ShortSeriesHandlingConfiguration
    :ivar use_stl: Possible values include: "Season", "SeasonTrend".
    :vartype use_stl: str or ~flow.models.UseStl
    :ivar target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
    :vartype target_aggregate_function: str or ~flow.models.TargetAggregationFunction
    :ivar cv_step_size:
    :vartype cv_step_size: int
    :ivar features_unknown_at_forecast_time:
    :vartype features_unknown_at_forecast_time: list[str]
    """

    # Attribute name -> REST wire 'key' and msrest 'type' string used for (de)serialization.
    _attribute_map = {
        'country_or_region_for_holidays': {'key': 'countryOrRegionForHolidays', 'type': 'str'},
        'time_column_name': {'key': 'timeColumnName', 'type': 'str'},
        'target_lags': {'key': 'targetLags', 'type': 'TargetLags'},
        'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'TargetRollingWindowSize'},
        'forecast_horizon': {'key': 'forecastHorizon', 'type': 'ForecastHorizon'},
        'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'},
        'frequency': {'key': 'frequency', 'type': 'str'},
        'feature_lags': {'key': 'featureLags', 'type': 'str'},
        'seasonality': {'key': 'seasonality', 'type': 'Seasonality'},
        'short_series_handling_config': {'key': 'shortSeriesHandlingConfig', 'type': 'str'},
        'use_stl': {'key': 'useStl', 'type': 'str'},
        'target_aggregate_function': {'key': 'targetAggregateFunction', 'type': 'str'},
        'cv_step_size': {'key': 'cvStepSize', 'type': 'int'},
        'features_unknown_at_forecast_time': {'key': 'featuresUnknownAtForecastTime', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        country_or_region_for_holidays: Optional[str] = None,
        time_column_name: Optional[str] = None,
        target_lags: Optional["TargetLags"] = None,
        target_rolling_window_size: Optional["TargetRollingWindowSize"] = None,
        forecast_horizon: Optional["ForecastHorizon"] = None,
        time_series_id_column_names: Optional[List[str]] = None,
        frequency: Optional[str] = None,
        feature_lags: Optional[str] = None,
        seasonality: Optional["Seasonality"] = None,
        short_series_handling_config: Optional[Union[str, "ShortSeriesHandlingConfiguration"]] = None,
        use_stl: Optional[Union[str, "UseStl"]] = None,
        target_aggregate_function: Optional[Union[str, "TargetAggregationFunction"]] = None,
        cv_step_size: Optional[int] = None,
        features_unknown_at_forecast_time: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword country_or_region_for_holidays:
        :paramtype country_or_region_for_holidays: str
        :keyword time_column_name:
        :paramtype time_column_name: str
        :keyword target_lags:
        :paramtype target_lags: ~flow.models.TargetLags
        :keyword target_rolling_window_size:
        :paramtype target_rolling_window_size: ~flow.models.TargetRollingWindowSize
        :keyword forecast_horizon:
        :paramtype forecast_horizon: ~flow.models.ForecastHorizon
        :keyword time_series_id_column_names:
        :paramtype time_series_id_column_names: list[str]
        :keyword frequency:
        :paramtype frequency: str
        :keyword feature_lags:
        :paramtype feature_lags: str
        :keyword seasonality:
        :paramtype seasonality: ~flow.models.Seasonality
        :keyword short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
        :paramtype short_series_handling_config: str or
         ~flow.models.ShortSeriesHandlingConfiguration
        :keyword use_stl: Possible values include: "Season", "SeasonTrend".
        :paramtype use_stl: str or ~flow.models.UseStl
        :keyword target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
        :paramtype target_aggregate_function: str or ~flow.models.TargetAggregationFunction
        :keyword cv_step_size:
        :paramtype cv_step_size: int
        :keyword features_unknown_at_forecast_time:
        :paramtype features_unknown_at_forecast_time: list[str]
        """
        super(ForecastingSettings, self).__init__(**kwargs)
        self.country_or_region_for_holidays = country_or_region_for_holidays
        self.time_column_name = time_column_name
        self.target_lags = target_lags
        self.target_rolling_window_size = target_rolling_window_size
        self.forecast_horizon = forecast_horizon
        self.time_series_id_column_names = time_series_id_column_names
        self.frequency = frequency
        self.feature_lags = feature_lags
        self.seasonality = seasonality
        self.short_series_handling_config = short_series_handling_config
        self.use_stl = use_stl
        self.target_aggregate_function = target_aggregate_function
        self.cv_step_size = cv_step_size
        self.features_unknown_at_forecast_time = features_unknown_at_forecast_time


class GeneralSettings(msrest.serialization.Model):
    """GeneralSettings.

    :ivar primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall",
     "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation",
     "NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError",
     "NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou".
    :vartype primary_metric: str or ~flow.models.PrimaryMetrics
    :ivar task_type: Possible values include: "Classification", "Regression", "Forecasting",
     "ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
     "ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
     "TextClassificationMultilabel".
    :vartype task_type: str or ~flow.models.TaskType
    :ivar log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error",
     "Critical".
    :vartype log_verbosity: str or ~flow.models.LogVerbosity
    """

    _attribute_map = {
        'primary_metric': {'key': 'primaryMetric', 'type': 'str'},
        'task_type': {'key': 'taskType', 'type': 'str'},
        'log_verbosity': {'key': 'logVerbosity', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        primary_metric: Optional[Union[str, "PrimaryMetrics"]] = None,
        task_type: Optional[Union[str, "TaskType"]] = None,
        log_verbosity: Optional[Union[str, "LogVerbosity"]] = None,
        **kwargs
    ):
        """
        :keyword primary_metric: Possible values include: "AUCWeighted", "Accuracy",
         "NormMacroRecall", "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted",
         "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score",
         "NormalizedMeanAbsoluteError", "NormalizedRootMeanSquaredLogError", "MeanAveragePrecision",
         "Iou".
        :paramtype primary_metric: str or ~flow.models.PrimaryMetrics
        :keyword task_type: Possible values include: "Classification", "Regression", "Forecasting",
         "ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
         "ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
         "TextClassificationMultilabel".
        :paramtype task_type: str or ~flow.models.TaskType
        :keyword log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning",
         "Error", "Critical".
        :paramtype log_verbosity: str or ~flow.models.LogVerbosity
        """
        super(GeneralSettings, self).__init__(**kwargs)
        self.primary_metric = primary_metric
        self.task_type = task_type
        self.log_verbosity = log_verbosity


class GeneratePipelineComponentRequest(msrest.serialization.Model):
    """GeneratePipelineComponentRequest.

    :ivar name:
    :vartype name: str
    :ivar display_name:
    :vartype display_name: str
    :ivar module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
     "Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
    :vartype module_scope: str or ~flow.models.ModuleScope
    :ivar is_deterministic:
    :vartype is_deterministic: bool
    :ivar category:
    :vartype category: str
    :ivar version:
    :vartype version: str
    :ivar set_as_default_version:
    :vartype set_as_default_version: bool
    :ivar registry_name:
    :vartype registry_name: str
    :ivar graph:
    :vartype graph: ~flow.models.GraphDraftEntity
    :ivar pipeline_run_settings:
    :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
    :ivar module_node_run_settings:
    :vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
    :ivar module_node_ui_input_settings:
    :vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar continue_run_on_step_failure:
    :vartype continue_run_on_step_failure: bool
    :ivar description:
    :vartype description: str
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar enforce_rerun:
    :vartype enforce_rerun: bool
    :ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
     "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
     "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
     "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
    :vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
    """

    # Attribute name -> REST wire 'key' and msrest 'type' string used for (de)serialization.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'module_scope': {'key': 'moduleScope', 'type': 'str'},
        'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
        'category': {'key': 'category', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
        'registry_name': {'key': 'registryName', 'type': 'str'},
        'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
        'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
        'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
        'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
        'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        display_name: Optional[str] = None,
        module_scope: Optional[Union[str, "ModuleScope"]] = None,
        is_deterministic: Optional[bool] = None,
        category: Optional[str] = None,
        version: Optional[str] = None,
        set_as_default_version: Optional[bool] = None,
        registry_name: Optional[str] = None,
        graph: Optional["GraphDraftEntity"] = None,
        pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
        module_node_run_settings: Optional[List["GraphModuleNodeRunSetting"]] = None,
        module_node_ui_input_settings: Optional[List["GraphModuleNodeUIInputSetting"]] = None,
        tags: Optional[Dict[str, str]] = None,
        continue_run_on_step_failure: Optional[bool] = None,
        description: Optional[str] = None,
        properties: Optional[Dict[str, str]] = None,
        enforce_rerun: Optional[bool] = None,
        dataset_access_modes: Optional[Union[str, "DatasetAccessModes"]] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
         "Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
        :paramtype module_scope: str or ~flow.models.ModuleScope
        :keyword is_deterministic:
        :paramtype is_deterministic: bool
        :keyword category:
        :paramtype category: str
        :keyword version:
        :paramtype version: str
        :keyword set_as_default_version:
        :paramtype set_as_default_version: bool
        :keyword registry_name:
        :paramtype registry_name: str
        :keyword graph:
        :paramtype graph: ~flow.models.GraphDraftEntity
        :keyword pipeline_run_settings:
        :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
        :keyword module_node_run_settings:
        :paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
        :keyword module_node_ui_input_settings:
        :paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword continue_run_on_step_failure:
        :paramtype continue_run_on_step_failure: bool
        :keyword description:
        :paramtype description: str
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword enforce_rerun:
        :paramtype enforce_rerun: bool
        :keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
         "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
         "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
         "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
        :paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
        """
        super(GeneratePipelineComponentRequest, self).__init__(**kwargs)
        # Mirror every keyword argument onto the instance; msrest serializes
        # these attributes according to the class-level _attribute_map.
        self.name = name
        self.display_name = display_name
        self.module_scope = module_scope
        self.is_deterministic = is_deterministic
        self.category = category
        self.version = version
        self.set_as_default_version = set_as_default_version
        self.registry_name = registry_name
        self.graph = graph
        self.pipeline_run_settings = pipeline_run_settings
        self.module_node_run_settings = module_node_run_settings
        self.module_node_ui_input_settings = module_node_ui_input_settings
        self.tags = tags
        self.continue_run_on_step_failure = continue_run_on_step_failure
        self.description = description
        self.properties = properties
        self.enforce_rerun = enforce_rerun
        self.dataset_access_modes = dataset_access_modes


class GenerateToolMetaRequest(msrest.serialization.Model):
    """GenerateToolMetaRequest.

    :ivar tools: This is a dictionary.
    :vartype tools: dict[str, ~flow.models.ToolSourceMeta]
    :ivar working_dir:
    :vartype working_dir: str
    """

    # Maps Python attribute names to REST wire-format keys/types for msrest
    # (de)serialization; '{ToolSourceMeta}' means a dict of ToolSourceMeta values.
    _attribute_map = {
        'tools': {'key': 'tools', 'type': '{ToolSourceMeta}'},
        'working_dir': {'key': 'working_dir', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        tools: Optional[Dict[str, "ToolSourceMeta"]] = None,
        working_dir: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword tools: This is a dictionary.
        :paramtype tools: dict[str, ~flow.models.ToolSourceMeta]
        :keyword working_dir:
        :paramtype working_dir: str
        """
        super(GenerateToolMetaRequest, self).__init__(**kwargs)
        self.tools = tools
        self.working_dir = working_dir


class GetDynamicListRequest(msrest.serialization.Model):
    """GetDynamicListRequest.

    :ivar func_path:
    :vartype func_path: str
    :ivar func_kwargs: This is a dictionary.
    :vartype func_kwargs: dict[str, any]
    """

    # 'func_kwargs' is wired as '{object}': values may be arbitrary
    # JSON-compatible objects, passed through untyped.
    _attribute_map = {
        'func_path': {'key': 'func_path', 'type': 'str'},
        'func_kwargs': {'key': 'func_kwargs', 'type': '{object}'},
    }

    def __init__(
        self,
        *,
        func_path: Optional[str] = None,
        func_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs
    ):
        """
        :keyword func_path:
        :paramtype func_path: str
        :keyword func_kwargs: This is a dictionary.
        :paramtype func_kwargs: dict[str, any]
        """
        super(GetDynamicListRequest, self).__init__(**kwargs)
        self.func_path = func_path
        self.func_kwargs = func_kwargs


class GetRunDataResultDto(msrest.serialization.Model):
    """GetRunDataResultDto.

    :ivar run_metadata:
    :vartype run_metadata: ~flow.models.RunDto
    :ivar run_definition: Anything.
    :vartype run_definition: any
    :ivar job_specification: Anything.
    :vartype job_specification: any
    :ivar system_settings: Dictionary of :code:`<string>`.
    :vartype system_settings: dict[str, str]
    """

    # run_definition / job_specification are untyped ('object') payloads and
    # are round-tripped without further deserialization.
    _attribute_map = {
        'run_metadata': {'key': 'runMetadata', 'type': 'RunDto'},
        'run_definition': {'key': 'runDefinition', 'type': 'object'},
        'job_specification': {'key': 'jobSpecification', 'type': 'object'},
        'system_settings': {'key': 'systemSettings', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        run_metadata: Optional["RunDto"] = None,
        run_definition: Optional[Any] = None,
        job_specification: Optional[Any] = None,
        system_settings: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword run_metadata:
        :paramtype run_metadata: ~flow.models.RunDto
        :keyword run_definition: Anything.
        :paramtype run_definition: any
        :keyword job_specification: Anything.
        :paramtype job_specification: any
        :keyword system_settings: Dictionary of :code:`<string>`.
        :paramtype system_settings: dict[str, str]
        """
        super(GetRunDataResultDto, self).__init__(**kwargs)
        self.run_metadata = run_metadata
        self.run_definition = run_definition
        self.job_specification = job_specification
        self.system_settings = system_settings


class GetTrainingSessionDto(msrest.serialization.Model):
    """GetTrainingSessionDto.
    :ivar properties:
    :vartype properties: ~flow.models.SessionProperties
    :ivar compute:
    :vartype compute: ~flow.models.ComputeContract
    """

    _attribute_map = {
        'properties': {'key': 'properties', 'type': 'SessionProperties'},
        'compute': {'key': 'compute', 'type': 'ComputeContract'},
    }

    def __init__(
        self,
        *,
        properties: Optional["SessionProperties"] = None,
        compute: Optional["ComputeContract"] = None,
        **kwargs
    ):
        """
        :keyword properties:
        :paramtype properties: ~flow.models.SessionProperties
        :keyword compute:
        :paramtype compute: ~flow.models.ComputeContract
        """
        super(GetTrainingSessionDto, self).__init__(**kwargs)
        self.properties = properties
        self.compute = compute


class GlobalJobDispatcherConfiguration(msrest.serialization.Model):
    """GlobalJobDispatcherConfiguration.

    :ivar vm_size:
    :vartype vm_size: list[str]
    :ivar compute_type: Possible values include: "AmlCompute", "AmlK8s".
    :vartype compute_type: str or ~flow.models.GlobalJobDispatcherSupportedComputeType
    :ivar region:
    :vartype region: list[str]
    :ivar my_resource_only:
    :vartype my_resource_only: bool
    :ivar redispatch_allowed:
    :vartype redispatch_allowed: bool
    :ivar low_priority_vm_tolerant:
    :vartype low_priority_vm_tolerant: bool
    :ivar vc_list:
    :vartype vc_list: list[str]
    :ivar plan_id:
    :vartype plan_id: str
    :ivar plan_region_id:
    :vartype plan_region_id: str
    :ivar vc_block_list:
    :vartype vc_block_list: list[str]
    :ivar cluster_block_list:
    :vartype cluster_block_list: list[str]
    """

    # NOTE: wire key 'lowPriorityVMTolerant' upper-cases "VM", unlike the
    # plain camelCase of the other keys — keep as-is for wire compatibility.
    _attribute_map = {
        'vm_size': {'key': 'vmSize', 'type': '[str]'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'region': {'key': 'region', 'type': '[str]'},
        'my_resource_only': {'key': 'myResourceOnly', 'type': 'bool'},
        'redispatch_allowed': {'key': 'redispatchAllowed', 'type': 'bool'},
        'low_priority_vm_tolerant': {'key': 'lowPriorityVMTolerant', 'type': 'bool'},
        'vc_list': {'key': 'vcList', 'type': '[str]'},
        'plan_id': {'key': 'planId', 'type': 'str'},
        'plan_region_id': {'key': 'planRegionId', 'type': 'str'},
        'vc_block_list': {'key': 'vcBlockList', 'type': '[str]'},
        'cluster_block_list': {'key': 'clusterBlockList', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        vm_size: Optional[List[str]] = None,
        compute_type: Optional[Union[str, "GlobalJobDispatcherSupportedComputeType"]] = None,
        region: Optional[List[str]] = None,
        my_resource_only: Optional[bool] = None,
        redispatch_allowed: Optional[bool] = None,
        low_priority_vm_tolerant: Optional[bool] = None,
        vc_list: Optional[List[str]] = None,
        plan_id: Optional[str] = None,
        plan_region_id: Optional[str] = None,
        vc_block_list: Optional[List[str]] = None,
        cluster_block_list: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword vm_size:
        :paramtype vm_size: list[str]
        :keyword compute_type: Possible values include: "AmlCompute", "AmlK8s".
        :paramtype compute_type: str or ~flow.models.GlobalJobDispatcherSupportedComputeType
        :keyword region:
        :paramtype region: list[str]
        :keyword my_resource_only:
        :paramtype my_resource_only: bool
        :keyword redispatch_allowed:
        :paramtype redispatch_allowed: bool
        :keyword low_priority_vm_tolerant:
        :paramtype low_priority_vm_tolerant: bool
        :keyword vc_list:
        :paramtype vc_list: list[str]
        :keyword plan_id:
        :paramtype plan_id: str
        :keyword plan_region_id:
        :paramtype plan_region_id: str
        :keyword vc_block_list:
        :paramtype vc_block_list: list[str]
        :keyword cluster_block_list:
        :paramtype cluster_block_list: list[str]
        """
        super(GlobalJobDispatcherConfiguration, self).__init__(**kwargs)
        self.vm_size = vm_size
        self.compute_type = compute_type
        self.region = region
        self.my_resource_only = my_resource_only
        self.redispatch_allowed = redispatch_allowed
        self.low_priority_vm_tolerant = low_priority_vm_tolerant
        self.vc_list = vc_list
        self.plan_id = plan_id
        self.plan_region_id = plan_region_id
        self.vc_block_list = vc_block_list
        self.cluster_block_list = cluster_block_list


class GlobsOptions(msrest.serialization.Model):
    """GlobsOptions.
    :ivar glob_patterns:
    :vartype glob_patterns: list[str]
    """

    _attribute_map = {
        'glob_patterns': {'key': 'globPatterns', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        glob_patterns: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword glob_patterns:
        :paramtype glob_patterns: list[str]
        """
        super(GlobsOptions, self).__init__(**kwargs)
        self.glob_patterns = glob_patterns


class GraphAnnotationNode(msrest.serialization.Model):
    """GraphAnnotationNode.

    :ivar id:
    :vartype id: str
    :ivar content:
    :vartype content: str
    :ivar mentioned_node_names:
    :vartype mentioned_node_names: list[str]
    :ivar structured_content:
    :vartype structured_content: str
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'content': {'key': 'content', 'type': 'str'},
        'mentioned_node_names': {'key': 'mentionedNodeNames', 'type': '[str]'},
        'structured_content': {'key': 'structuredContent', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # shadows builtin id(); kept for wire-format parity
        content: Optional[str] = None,
        mentioned_node_names: Optional[List[str]] = None,
        structured_content: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword content:
        :paramtype content: str
        :keyword mentioned_node_names:
        :paramtype mentioned_node_names: list[str]
        :keyword structured_content:
        :paramtype structured_content: str
        """
        super(GraphAnnotationNode, self).__init__(**kwargs)
        self.id = id
        self.content = content
        self.mentioned_node_names = mentioned_node_names
        self.structured_content = structured_content


class GraphControlNode(msrest.serialization.Model):
    """GraphControlNode.

    :ivar id:
    :vartype id: str
    :ivar control_type: The only acceptable values to pass in are None and "IfElse". The default value is None.
    :vartype control_type: str
    :ivar control_parameter:
    :vartype control_parameter: ~flow.models.ParameterAssignment
    :ivar run_attribution:
    :vartype run_attribution: str
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'control_type': {'key': 'controlType', 'type': 'str'},
        'control_parameter': {'key': 'controlParameter', 'type': 'ParameterAssignment'},
        'run_attribution': {'key': 'runAttribution', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,
        control_type: Optional[str] = None,
        control_parameter: Optional["ParameterAssignment"] = None,
        run_attribution: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword control_type: The only acceptable values to pass in are None and "IfElse". The default value is None.
        :paramtype control_type: str
        :keyword control_parameter:
        :paramtype control_parameter: ~flow.models.ParameterAssignment
        :keyword run_attribution:
        :paramtype run_attribution: str
        """
        super(GraphControlNode, self).__init__(**kwargs)
        self.id = id
        self.control_type = control_type
        self.control_parameter = control_parameter
        self.run_attribution = run_attribution


class GraphControlReferenceNode(msrest.serialization.Model):
    """GraphControlReferenceNode.

    :ivar id:
    :vartype id: str
    :ivar name:
    :vartype name: str
    :ivar comment:
    :vartype comment: str
    :ivar control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
    :vartype control_flow_type: str or ~flow.models.ControlFlowType
    :ivar reference_node_id:
    :vartype reference_node_id: str
    :ivar do_while_control_flow_info:
    :vartype do_while_control_flow_info: ~flow.models.DoWhileControlFlowInfo
    :ivar parallel_for_control_flow_info:
    :vartype parallel_for_control_flow_info: ~flow.models.ParallelForControlFlowInfo
    :ivar run_attribution:
    :vartype run_attribution: str
    """

    # Only one of the two *_control_flow_info payloads is expected to be
    # relevant for a given control_flow_type; both are optional on the wire.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'comment': {'key': 'comment', 'type': 'str'},
        'control_flow_type': {'key': 'controlFlowType', 'type': 'str'},
        'reference_node_id': {'key': 'referenceNodeId', 'type': 'str'},
        'do_while_control_flow_info': {'key': 'doWhileControlFlowInfo', 'type': 'DoWhileControlFlowInfo'},
        'parallel_for_control_flow_info': {'key': 'parallelForControlFlowInfo', 'type': 'ParallelForControlFlowInfo'},
        'run_attribution': {'key': 'runAttribution', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,
        name: Optional[str] = None,
        comment: Optional[str] = None,
        control_flow_type: Optional[Union[str, "ControlFlowType"]] = None,
        reference_node_id: Optional[str] = None,
        do_while_control_flow_info: Optional["DoWhileControlFlowInfo"] = None,
        parallel_for_control_flow_info: Optional["ParallelForControlFlowInfo"] = None,
        run_attribution: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword name:
        :paramtype name: str
        :keyword comment:
        :paramtype comment: str
        :keyword control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
        :paramtype control_flow_type: str or ~flow.models.ControlFlowType
        :keyword reference_node_id:
        :paramtype reference_node_id: str
        :keyword do_while_control_flow_info:
        :paramtype do_while_control_flow_info: ~flow.models.DoWhileControlFlowInfo
        :keyword parallel_for_control_flow_info:
        :paramtype parallel_for_control_flow_info: ~flow.models.ParallelForControlFlowInfo
        :keyword run_attribution:
        :paramtype run_attribution: str
        """
        super(GraphControlReferenceNode, self).__init__(**kwargs)
        self.id = id
        self.name = name
        self.comment = comment
        self.control_flow_type = control_flow_type
        self.reference_node_id = reference_node_id
        self.do_while_control_flow_info = do_while_control_flow_info
        self.parallel_for_control_flow_info = parallel_for_control_flow_info
        self.run_attribution = run_attribution


class GraphDatasetNode(msrest.serialization.Model):
    """GraphDatasetNode.

    :ivar id:
    :vartype id: str
    :ivar dataset_id:
    :vartype dataset_id: str
    :ivar data_path_parameter_name:
    :vartype data_path_parameter_name: str
    :ivar data_set_definition:
    :vartype data_set_definition: ~flow.models.DataSetDefinition
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'dataset_id': {'key': 'datasetId', 'type': 'str'},
        'data_path_parameter_name': {'key': 'dataPathParameterName', 'type': 'str'},
        'data_set_definition': {'key': 'dataSetDefinition', 'type': 'DataSetDefinition'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,
        dataset_id: Optional[str] = None,
        data_path_parameter_name: Optional[str] = None,
        data_set_definition: Optional["DataSetDefinition"] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword dataset_id:
        :paramtype dataset_id: str
        :keyword data_path_parameter_name:
        :paramtype data_path_parameter_name: str
        :keyword data_set_definition:
        :paramtype data_set_definition: ~flow.models.DataSetDefinition
        """
        super(GraphDatasetNode, self).__init__(**kwargs)
        self.id = id
        self.dataset_id = dataset_id
        self.data_path_parameter_name = data_path_parameter_name
        self.data_set_definition = data_set_definition


class GraphDraftEntity(msrest.serialization.Model):
    """GraphDraftEntity.

    :ivar module_nodes:
    :vartype module_nodes: list[~flow.models.GraphModuleNode]
    :ivar dataset_nodes:
    :vartype dataset_nodes: list[~flow.models.GraphDatasetNode]
    :ivar sub_graph_nodes:
    :vartype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
    :ivar control_reference_nodes:
    :vartype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
    :ivar control_nodes:
    :vartype control_nodes: list[~flow.models.GraphControlNode]
    :ivar edges:
    :vartype edges: list[~flow.models.GraphEdge]
    :ivar entity_interface:
    :vartype entity_interface: ~flow.models.EntityInterface
    :ivar graph_layout:
    :vartype graph_layout: ~flow.models.GraphLayout
    :ivar created_by:
    :vartype created_by: ~flow.models.CreatedBy
    :ivar last_updated_by:
    :vartype last_updated_by: ~flow.models.CreatedBy
    :ivar default_compute:
    :vartype default_compute: ~flow.models.ComputeSetting
    :ivar default_datastore:
    :vartype default_datastore: ~flow.models.DatastoreSetting
    :ivar default_cloud_priority:
    :vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
    :ivar extended_properties: This is a dictionary.
    :vartype extended_properties: dict[str, str]
    :ivar parent_sub_graph_module_ids:
    :vartype parent_sub_graph_module_ids: list[str]
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # 'iso-8601' entries are (de)serialized to/from datetime.datetime by msrest.
    _attribute_map = {
        'module_nodes': {'key': 'moduleNodes', 'type': '[GraphModuleNode]'},
        'dataset_nodes': {'key': 'datasetNodes', 'type': '[GraphDatasetNode]'},
        'sub_graph_nodes': {'key': 'subGraphNodes', 'type': '[GraphReferenceNode]'},
        'control_reference_nodes': {'key': 'controlReferenceNodes', 'type': '[GraphControlReferenceNode]'},
        'control_nodes': {'key': 'controlNodes', 'type': '[GraphControlNode]'},
        'edges': {'key': 'edges', 'type': '[GraphEdge]'},
        'entity_interface': {'key': 'entityInterface', 'type': 'EntityInterface'},
        'graph_layout': {'key': 'graphLayout', 'type': 'GraphLayout'},
        'created_by': {'key': 'createdBy', 'type': 'CreatedBy'},
        'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'CreatedBy'},
        'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
        'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
        'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
        'extended_properties': {'key': 'extendedProperties', 'type': '{str}'},
        'parent_sub_graph_module_ids': {'key': 'parentSubGraphModuleIds', 'type': '[str]'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        module_nodes: Optional[List["GraphModuleNode"]] = None,
        dataset_nodes: Optional[List["GraphDatasetNode"]] = None,
        sub_graph_nodes: Optional[List["GraphReferenceNode"]] = None,
        control_reference_nodes: Optional[List["GraphControlReferenceNode"]] = None,
        control_nodes: Optional[List["GraphControlNode"]] = None,
        edges: Optional[List["GraphEdge"]] = None,
        entity_interface: Optional["EntityInterface"] = None,
        graph_layout: Optional["GraphLayout"] = None,
        created_by: Optional["CreatedBy"] = None,
        last_updated_by: Optional["CreatedBy"] = None,
        default_compute: Optional["ComputeSetting"] = None,
        default_datastore: Optional["DatastoreSetting"] = None,
        default_cloud_priority: Optional["CloudPrioritySetting"] = None,
        extended_properties: Optional[Dict[str, str]] = None,
        parent_sub_graph_module_ids: Optional[List[str]] = None,
        id: Optional[str] = None,
        etag: Optional[str] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        **kwargs
    ):
        """
        :keyword module_nodes:
        :paramtype module_nodes: list[~flow.models.GraphModuleNode]
        :keyword dataset_nodes:
        :paramtype dataset_nodes: list[~flow.models.GraphDatasetNode]
        :keyword sub_graph_nodes:
        :paramtype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
        :keyword control_reference_nodes:
        :paramtype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
        :keyword control_nodes:
        :paramtype control_nodes: list[~flow.models.GraphControlNode]
        :keyword edges:
        :paramtype edges: list[~flow.models.GraphEdge]
        :keyword entity_interface:
        :paramtype entity_interface: ~flow.models.EntityInterface
        :keyword graph_layout:
        :paramtype graph_layout: ~flow.models.GraphLayout
        :keyword created_by:
        :paramtype created_by: ~flow.models.CreatedBy
        :keyword last_updated_by:
        :paramtype last_updated_by: ~flow.models.CreatedBy
        :keyword default_compute:
        :paramtype default_compute: ~flow.models.ComputeSetting
        :keyword default_datastore:
        :paramtype default_datastore: ~flow.models.DatastoreSetting
        :keyword default_cloud_priority:
        :paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
        :keyword extended_properties: This is a dictionary.
        :paramtype extended_properties: dict[str, str]
        :keyword parent_sub_graph_module_ids:
        :paramtype parent_sub_graph_module_ids: list[str]
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(GraphDraftEntity, self).__init__(**kwargs)
        self.module_nodes = module_nodes
        self.dataset_nodes = dataset_nodes
        self.sub_graph_nodes = sub_graph_nodes
        self.control_reference_nodes = control_reference_nodes
        self.control_nodes = control_nodes
        self.edges = edges
        self.entity_interface = entity_interface
        self.graph_layout = graph_layout
        self.created_by = created_by
        self.last_updated_by = last_updated_by
        self.default_compute = default_compute
        self.default_datastore = default_datastore
        self.default_cloud_priority = default_cloud_priority
        self.extended_properties = extended_properties
        self.parent_sub_graph_module_ids = parent_sub_graph_module_ids
        self.id = id
        self.etag = etag
        self.created_date = created_date
        self.last_modified_date = last_modified_date


class GraphEdge(msrest.serialization.Model):
    """GraphEdge.
    :ivar source_output_port:
    :vartype source_output_port: ~flow.models.PortInfo
    :ivar destination_input_port:
    :vartype destination_input_port: ~flow.models.PortInfo
    """

    _attribute_map = {
        'source_output_port': {'key': 'sourceOutputPort', 'type': 'PortInfo'},
        'destination_input_port': {'key': 'destinationInputPort', 'type': 'PortInfo'},
    }

    def __init__(
        self,
        *,
        source_output_port: Optional["PortInfo"] = None,
        destination_input_port: Optional["PortInfo"] = None,
        **kwargs
    ):
        """
        :keyword source_output_port:
        :paramtype source_output_port: ~flow.models.PortInfo
        :keyword destination_input_port:
        :paramtype destination_input_port: ~flow.models.PortInfo
        """
        super(GraphEdge, self).__init__(**kwargs)
        self.source_output_port = source_output_port
        self.destination_input_port = destination_input_port


class GraphLayout(msrest.serialization.Model):
    """GraphLayout.

    :ivar node_layouts: This is a dictionary.
    :vartype node_layouts: dict[str, ~flow.models.NodeLayout]
    :ivar extended_data:
    :vartype extended_data: str
    :ivar annotation_nodes:
    :vartype annotation_nodes: list[~flow.models.GraphAnnotationNode]
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # 'iso-8601' entries are (de)serialized to/from datetime.datetime by msrest.
    _attribute_map = {
        'node_layouts': {'key': 'nodeLayouts', 'type': '{NodeLayout}'},
        'extended_data': {'key': 'extendedData', 'type': 'str'},
        'annotation_nodes': {'key': 'annotationNodes', 'type': '[GraphAnnotationNode]'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        node_layouts: Optional[Dict[str, "NodeLayout"]] = None,
        extended_data: Optional[str] = None,
        annotation_nodes: Optional[List["GraphAnnotationNode"]] = None,
        id: Optional[str] = None,
        etag: Optional[str] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        **kwargs
    ):
        """
        :keyword node_layouts: This is a dictionary.
        :paramtype node_layouts: dict[str, ~flow.models.NodeLayout]
        :keyword extended_data:
        :paramtype extended_data: str
        :keyword annotation_nodes:
        :paramtype annotation_nodes: list[~flow.models.GraphAnnotationNode]
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(GraphLayout, self).__init__(**kwargs)
        self.node_layouts = node_layouts
        self.extended_data = extended_data
        self.annotation_nodes = annotation_nodes
        self.id = id
        self.etag = etag
        self.created_date = created_date
        self.last_modified_date = last_modified_date


class GraphLayoutCreationInfo(msrest.serialization.Model):
    """GraphLayoutCreationInfo.

    :ivar node_layouts: This is a dictionary.
    :vartype node_layouts: dict[str, ~flow.models.NodeLayout]
    :ivar extended_data:
    :vartype extended_data: str
    :ivar annotation_nodes:
    :vartype annotation_nodes: list[~flow.models.GraphAnnotationNode]
    """

    _attribute_map = {
        'node_layouts': {'key': 'nodeLayouts', 'type': '{NodeLayout}'},
        'extended_data': {'key': 'extendedData', 'type': 'str'},
        'annotation_nodes': {'key': 'annotationNodes', 'type': '[GraphAnnotationNode]'},
    }

    def __init__(
        self,
        *,
        node_layouts: Optional[Dict[str, "NodeLayout"]] = None,
        extended_data: Optional[str] = None,
        annotation_nodes: Optional[List["GraphAnnotationNode"]] = None,
        **kwargs
    ):
        """
        :keyword node_layouts: This is a dictionary.
        :paramtype node_layouts: dict[str, ~flow.models.NodeLayout]
        :keyword extended_data:
        :paramtype extended_data: str
        :keyword annotation_nodes:
        :paramtype annotation_nodes: list[~flow.models.GraphAnnotationNode]
        """
        super(GraphLayoutCreationInfo, self).__init__(**kwargs)
        self.node_layouts = node_layouts
        self.extended_data = extended_data
        self.annotation_nodes = annotation_nodes


class GraphModuleNode(msrest.serialization.Model):
    """GraphModuleNode.

    :ivar module_type: Possible values include: "None", "BatchInferencing".
    :vartype module_type: str or ~flow.models.ModuleType
    :ivar runconfig:
    :vartype runconfig: str
    :ivar id:
    :vartype id: str
    :ivar module_id:
    :vartype module_id: str
    :ivar comment:
    :vartype comment: str
    :ivar name:
    :vartype name: str
    :ivar module_parameters:
    :vartype module_parameters: list[~flow.models.ParameterAssignment]
    :ivar module_metadata_parameters:
    :vartype module_metadata_parameters: list[~flow.models.ParameterAssignment]
    :ivar module_output_settings:
    :vartype module_output_settings: list[~flow.models.OutputSetting]
    :ivar module_input_settings:
    :vartype module_input_settings: list[~flow.models.InputSetting]
    :ivar use_graph_default_compute:
    :vartype use_graph_default_compute: bool
    :ivar use_graph_default_datastore:
    :vartype use_graph_default_datastore: bool
    :ivar regenerate_output:
    :vartype regenerate_output: bool
    :ivar control_inputs:
    :vartype control_inputs: list[~flow.models.ControlInput]
    :ivar cloud_settings:
    :vartype cloud_settings: ~flow.models.CloudSettings
    :ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
    :vartype execution_phase: str or ~flow.models.ExecutionPhase
    :ivar run_attribution:
    :vartype run_attribution: str
    """

    # 'runconfig' keeps the same lowercase name on the wire (no camelCase key).
    _attribute_map = {
        'module_type': {'key': 'moduleType', 'type': 'str'},
        'runconfig': {'key': 'runconfig', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'module_id': {'key': 'moduleId', 'type': 'str'},
        'comment': {'key': 'comment', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'module_parameters': {'key': 'moduleParameters', 'type': '[ParameterAssignment]'},
        'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[ParameterAssignment]'},
        'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[OutputSetting]'},
        'module_input_settings': {'key': 'moduleInputSettings', 'type': '[InputSetting]'},
        'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
        'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
        'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
        'control_inputs': {'key': 'controlInputs', 'type': '[ControlInput]'},
        'cloud_settings': {'key': 'cloudSettings', 'type': 'CloudSettings'},
        'execution_phase': {'key': 'executionPhase', 'type': 'str'},
        'run_attribution': {'key': 'runAttribution', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        module_type: Optional[Union[str, "ModuleType"]] = None,
        runconfig: Optional[str] = None,
        id: Optional[str] = None,
        module_id: Optional[str] = None,
        comment: Optional[str] = None,
        name: Optional[str] = None,
        module_parameters: Optional[List["ParameterAssignment"]] = None,
        module_metadata_parameters: Optional[List["ParameterAssignment"]] = None,
        module_output_settings: Optional[List["OutputSetting"]] = None,
        module_input_settings: Optional[List["InputSetting"]] = None,
        use_graph_default_compute: Optional[bool] = None,
        use_graph_default_datastore: Optional[bool] = None,
        regenerate_output: Optional[bool] = None,
        control_inputs: Optional[List["ControlInput"]] = None,
        cloud_settings: Optional["CloudSettings"] = None,
        execution_phase: Optional[Union[str, "ExecutionPhase"]] = None,
        run_attribution: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword module_type: Possible values include: "None", "BatchInferencing".
        :paramtype module_type: str or ~flow.models.ModuleType
        :keyword runconfig:
        :paramtype runconfig: str
        :keyword id:
        :paramtype id: str
        :keyword module_id:
        :paramtype module_id: str
        :keyword comment:
        :paramtype comment: str
        :keyword name:
        :paramtype name: str
        :keyword module_parameters:
        :paramtype module_parameters: list[~flow.models.ParameterAssignment]
        :keyword module_metadata_parameters:
        :paramtype module_metadata_parameters: list[~flow.models.ParameterAssignment]
        :keyword module_output_settings:
        :paramtype module_output_settings: list[~flow.models.OutputSetting]
        :keyword module_input_settings:
        :paramtype module_input_settings: list[~flow.models.InputSetting]
        :keyword use_graph_default_compute:
        :paramtype use_graph_default_compute: bool
        :keyword use_graph_default_datastore:
        :paramtype use_graph_default_datastore: bool
        :keyword regenerate_output:
        :paramtype regenerate_output: bool
        :keyword control_inputs:
        :paramtype control_inputs: list[~flow.models.ControlInput]
        :keyword cloud_settings:
        :paramtype cloud_settings: ~flow.models.CloudSettings
        :keyword execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
        :paramtype execution_phase: str or ~flow.models.ExecutionPhase
        :keyword run_attribution:
        :paramtype run_attribution: str
        """
        super(GraphModuleNode, self).__init__(**kwargs)
        self.module_type = module_type
        self.runconfig = runconfig
        self.id = id
        self.module_id = module_id
        self.comment = comment
        self.name = name
        self.module_parameters = module_parameters
        self.module_metadata_parameters = module_metadata_parameters
        self.module_output_settings = module_output_settings
        self.module_input_settings = module_input_settings
        self.use_graph_default_compute = use_graph_default_compute
        self.use_graph_default_datastore = use_graph_default_datastore
        self.regenerate_output = regenerate_output
        self.control_inputs = control_inputs
        self.cloud_settings = cloud_settings
        self.execution_phase = execution_phase
        self.run_attribution = run_attribution


class GraphModuleNodeRunSetting(msrest.serialization.Model):
    """GraphModuleNodeRunSetting.

    :ivar node_id:
    :vartype node_id: str
    :ivar module_id:
    :vartype module_id: str
    :ivar step_type:
    :vartype step_type: str
    :ivar run_settings:
    :vartype run_settings: list[~flow.models.RunSettingParameterAssignment]
    """

    _attribute_map = {
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'module_id': {'key': 'moduleId', 'type': 'str'},
        'step_type': {'key': 'stepType', 'type': 'str'},
        'run_settings': {'key': 'runSettings', 'type': '[RunSettingParameterAssignment]'},
    }

    def __init__(
        self,
        *,
        node_id: Optional[str] = None,
        module_id: Optional[str] = None,
        step_type: Optional[str] = None,
        run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
        **kwargs
    ):
        """
        :keyword node_id:
        :paramtype node_id: str
        :keyword module_id:
        :paramtype module_id: str
        :keyword step_type:
        :paramtype step_type: str
        :keyword run_settings:
        :paramtype run_settings: list[~flow.models.RunSettingParameterAssignment]
        """
        super(GraphModuleNodeRunSetting, self).__init__(**kwargs)
        self.node_id = node_id
        self.module_id = module_id
        self.step_type = step_type
        self.run_settings = run_settings
class GraphModuleNodeUIInputSetting(msrest.serialization.Model): """GraphModuleNodeUIInputSetting. :ivar node_id: :vartype node_id: str :ivar module_id: :vartype module_id: str :ivar module_input_settings: :vartype module_input_settings: list[~flow.models.UIInputSetting] """ _attribute_map = { 'node_id': {'key': 'nodeId', 'type': 'str'}, 'module_id': {'key': 'moduleId', 'type': 'str'}, 'module_input_settings': {'key': 'moduleInputSettings', 'type': '[UIInputSetting]'}, } def __init__( self, *, node_id: Optional[str] = None, module_id: Optional[str] = None, module_input_settings: Optional[List["UIInputSetting"]] = None, **kwargs ): """ :keyword node_id: :paramtype node_id: str :keyword module_id: :paramtype module_id: str :keyword module_input_settings: :paramtype module_input_settings: list[~flow.models.UIInputSetting] """ super(GraphModuleNodeUIInputSetting, self).__init__(**kwargs) self.node_id = node_id self.module_id = module_id self.module_input_settings = module_input_settings class GraphNodeStatusInfo(msrest.serialization.Model): """GraphNodeStatusInfo. :ivar status: Possible values include: "NotStarted", "Queued", "Running", "Failed", "Finished", "Canceled", "PartiallyExecuted", "Bypassed". :vartype status: str or ~flow.models.TaskStatusCode :ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused", "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled". :vartype run_status: str or ~flow.models.RunStatus :ivar is_bypassed: :vartype is_bypassed: bool :ivar has_failed_child_run: :vartype has_failed_child_run: bool :ivar partially_executed: :vartype partially_executed: bool :ivar properties: This is a dictionary. 
:vartype properties: dict[str, str] :ivar aether_start_time: :vartype aether_start_time: ~datetime.datetime :ivar aether_end_time: :vartype aether_end_time: ~datetime.datetime :ivar aether_creation_time: :vartype aether_creation_time: ~datetime.datetime :ivar run_history_start_time: :vartype run_history_start_time: ~datetime.datetime :ivar run_history_end_time: :vartype run_history_end_time: ~datetime.datetime :ivar run_history_creation_time: :vartype run_history_creation_time: ~datetime.datetime :ivar reuse_info: :vartype reuse_info: ~flow.models.TaskReuseInfo :ivar control_flow_info: :vartype control_flow_info: ~flow.models.TaskControlFlowInfo :ivar status_code: Possible values include: "NotStarted", "Queued", "Running", "Failed", "Finished", "Canceled", "PartiallyExecuted", "Bypassed". :vartype status_code: str or ~flow.models.TaskStatusCode :ivar status_detail: :vartype status_detail: str :ivar creation_time: :vartype creation_time: ~datetime.datetime :ivar schedule_time: :vartype schedule_time: ~datetime.datetime :ivar start_time: :vartype start_time: ~datetime.datetime :ivar end_time: :vartype end_time: ~datetime.datetime :ivar request_id: :vartype request_id: str :ivar run_id: :vartype run_id: str :ivar data_container_id: :vartype data_container_id: str :ivar real_time_log_path: :vartype real_time_log_path: str :ivar has_warnings: :vartype has_warnings: bool :ivar composite_node_id: :vartype composite_node_id: str """ _attribute_map = { 'status': {'key': 'status', 'type': 'str'}, 'run_status': {'key': 'runStatus', 'type': 'str'}, 'is_bypassed': {'key': 'isBypassed', 'type': 'bool'}, 'has_failed_child_run': {'key': 'hasFailedChildRun', 'type': 'bool'}, 'partially_executed': {'key': 'partiallyExecuted', 'type': 'bool'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'aether_start_time': {'key': 'aetherStartTime', 'type': 'iso-8601'}, 'aether_end_time': {'key': 'aetherEndTime', 'type': 'iso-8601'}, 'aether_creation_time': {'key': 'aetherCreationTime', 
'type': 'iso-8601'}, 'run_history_start_time': {'key': 'runHistoryStartTime', 'type': 'iso-8601'}, 'run_history_end_time': {'key': 'runHistoryEndTime', 'type': 'iso-8601'}, 'run_history_creation_time': {'key': 'runHistoryCreationTime', 'type': 'iso-8601'}, 'reuse_info': {'key': 'reuseInfo', 'type': 'TaskReuseInfo'}, 'control_flow_info': {'key': 'controlFlowInfo', 'type': 'TaskControlFlowInfo'}, 'status_code': {'key': 'statusCode', 'type': 'str'}, 'status_detail': {'key': 'statusDetail', 'type': 'str'}, 'creation_time': {'key': 'creationTime', 'type': 'iso-8601'}, 'schedule_time': {'key': 'scheduleTime', 'type': 'iso-8601'}, 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, 'request_id': {'key': 'requestId', 'type': 'str'}, 'run_id': {'key': 'runId', 'type': 'str'}, 'data_container_id': {'key': 'dataContainerId', 'type': 'str'}, 'real_time_log_path': {'key': 'realTimeLogPath', 'type': 'str'}, 'has_warnings': {'key': 'hasWarnings', 'type': 'bool'}, 'composite_node_id': {'key': 'compositeNodeId', 'type': 'str'}, } def __init__( self, *, status: Optional[Union[str, "TaskStatusCode"]] = None, run_status: Optional[Union[str, "RunStatus"]] = None, is_bypassed: Optional[bool] = None, has_failed_child_run: Optional[bool] = None, partially_executed: Optional[bool] = None, properties: Optional[Dict[str, str]] = None, aether_start_time: Optional[datetime.datetime] = None, aether_end_time: Optional[datetime.datetime] = None, aether_creation_time: Optional[datetime.datetime] = None, run_history_start_time: Optional[datetime.datetime] = None, run_history_end_time: Optional[datetime.datetime] = None, run_history_creation_time: Optional[datetime.datetime] = None, reuse_info: Optional["TaskReuseInfo"] = None, control_flow_info: Optional["TaskControlFlowInfo"] = None, status_code: Optional[Union[str, "TaskStatusCode"]] = None, status_detail: Optional[str] = None, creation_time: Optional[datetime.datetime] = None, 
schedule_time: Optional[datetime.datetime] = None, start_time: Optional[datetime.datetime] = None, end_time: Optional[datetime.datetime] = None, request_id: Optional[str] = None, run_id: Optional[str] = None, data_container_id: Optional[str] = None, real_time_log_path: Optional[str] = None, has_warnings: Optional[bool] = None, composite_node_id: Optional[str] = None, **kwargs ): """ :keyword status: Possible values include: "NotStarted", "Queued", "Running", "Failed", "Finished", "Canceled", "PartiallyExecuted", "Bypassed". :paramtype status: str or ~flow.models.TaskStatusCode :keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused", "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled". :paramtype run_status: str or ~flow.models.RunStatus :keyword is_bypassed: :paramtype is_bypassed: bool :keyword has_failed_child_run: :paramtype has_failed_child_run: bool :keyword partially_executed: :paramtype partially_executed: bool :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] :keyword aether_start_time: :paramtype aether_start_time: ~datetime.datetime :keyword aether_end_time: :paramtype aether_end_time: ~datetime.datetime :keyword aether_creation_time: :paramtype aether_creation_time: ~datetime.datetime :keyword run_history_start_time: :paramtype run_history_start_time: ~datetime.datetime :keyword run_history_end_time: :paramtype run_history_end_time: ~datetime.datetime :keyword run_history_creation_time: :paramtype run_history_creation_time: ~datetime.datetime :keyword reuse_info: :paramtype reuse_info: ~flow.models.TaskReuseInfo :keyword control_flow_info: :paramtype control_flow_info: ~flow.models.TaskControlFlowInfo :keyword status_code: Possible values include: "NotStarted", "Queued", "Running", "Failed", "Finished", "Canceled", "PartiallyExecuted", "Bypassed". 
:paramtype status_code: str or ~flow.models.TaskStatusCode :keyword status_detail: :paramtype status_detail: str :keyword creation_time: :paramtype creation_time: ~datetime.datetime :keyword schedule_time: :paramtype schedule_time: ~datetime.datetime :keyword start_time: :paramtype start_time: ~datetime.datetime :keyword end_time: :paramtype end_time: ~datetime.datetime :keyword request_id: :paramtype request_id: str :keyword run_id: :paramtype run_id: str :keyword data_container_id: :paramtype data_container_id: str :keyword real_time_log_path: :paramtype real_time_log_path: str :keyword has_warnings: :paramtype has_warnings: bool :keyword composite_node_id: :paramtype composite_node_id: str """ super(GraphNodeStatusInfo, self).__init__(**kwargs) self.status = status self.run_status = run_status self.is_bypassed = is_bypassed self.has_failed_child_run = has_failed_child_run self.partially_executed = partially_executed self.properties = properties self.aether_start_time = aether_start_time self.aether_end_time = aether_end_time self.aether_creation_time = aether_creation_time self.run_history_start_time = run_history_start_time self.run_history_end_time = run_history_end_time self.run_history_creation_time = run_history_creation_time self.reuse_info = reuse_info self.control_flow_info = control_flow_info self.status_code = status_code self.status_detail = status_detail self.creation_time = creation_time self.schedule_time = schedule_time self.start_time = start_time self.end_time = end_time self.request_id = request_id self.run_id = run_id self.data_container_id = data_container_id self.real_time_log_path = real_time_log_path self.has_warnings = has_warnings self.composite_node_id = composite_node_id class GraphReferenceNode(msrest.serialization.Model): """GraphReferenceNode. 
:ivar graph_id: :vartype graph_id: str :ivar default_compute: :vartype default_compute: ~flow.models.ComputeSetting :ivar default_datastore: :vartype default_datastore: ~flow.models.DatastoreSetting :ivar id: :vartype id: str :ivar module_id: :vartype module_id: str :ivar comment: :vartype comment: str :ivar name: :vartype name: str :ivar module_parameters: :vartype module_parameters: list[~flow.models.ParameterAssignment] :ivar module_metadata_parameters: :vartype module_metadata_parameters: list[~flow.models.ParameterAssignment] :ivar module_output_settings: :vartype module_output_settings: list[~flow.models.OutputSetting] :ivar module_input_settings: :vartype module_input_settings: list[~flow.models.InputSetting] :ivar use_graph_default_compute: :vartype use_graph_default_compute: bool :ivar use_graph_default_datastore: :vartype use_graph_default_datastore: bool :ivar regenerate_output: :vartype regenerate_output: bool :ivar control_inputs: :vartype control_inputs: list[~flow.models.ControlInput] :ivar cloud_settings: :vartype cloud_settings: ~flow.models.CloudSettings :ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization". 
:vartype execution_phase: str or ~flow.models.ExecutionPhase :ivar run_attribution: :vartype run_attribution: str """ _attribute_map = { 'graph_id': {'key': 'graphId', 'type': 'str'}, 'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'}, 'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'}, 'id': {'key': 'id', 'type': 'str'}, 'module_id': {'key': 'moduleId', 'type': 'str'}, 'comment': {'key': 'comment', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'module_parameters': {'key': 'moduleParameters', 'type': '[ParameterAssignment]'}, 'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[ParameterAssignment]'}, 'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[OutputSetting]'}, 'module_input_settings': {'key': 'moduleInputSettings', 'type': '[InputSetting]'}, 'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'}, 'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'}, 'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'}, 'control_inputs': {'key': 'controlInputs', 'type': '[ControlInput]'}, 'cloud_settings': {'key': 'cloudSettings', 'type': 'CloudSettings'}, 'execution_phase': {'key': 'executionPhase', 'type': 'str'}, 'run_attribution': {'key': 'runAttribution', 'type': 'str'}, } def __init__( self, *, graph_id: Optional[str] = None, default_compute: Optional["ComputeSetting"] = None, default_datastore: Optional["DatastoreSetting"] = None, id: Optional[str] = None, module_id: Optional[str] = None, comment: Optional[str] = None, name: Optional[str] = None, module_parameters: Optional[List["ParameterAssignment"]] = None, module_metadata_parameters: Optional[List["ParameterAssignment"]] = None, module_output_settings: Optional[List["OutputSetting"]] = None, module_input_settings: Optional[List["InputSetting"]] = None, use_graph_default_compute: Optional[bool] = None, use_graph_default_datastore: Optional[bool] = None, 
regenerate_output: Optional[bool] = None, control_inputs: Optional[List["ControlInput"]] = None, cloud_settings: Optional["CloudSettings"] = None, execution_phase: Optional[Union[str, "ExecutionPhase"]] = None, run_attribution: Optional[str] = None, **kwargs ): """ :keyword graph_id: :paramtype graph_id: str :keyword default_compute: :paramtype default_compute: ~flow.models.ComputeSetting :keyword default_datastore: :paramtype default_datastore: ~flow.models.DatastoreSetting :keyword id: :paramtype id: str :keyword module_id: :paramtype module_id: str :keyword comment: :paramtype comment: str :keyword name: :paramtype name: str :keyword module_parameters: :paramtype module_parameters: list[~flow.models.ParameterAssignment] :keyword module_metadata_parameters: :paramtype module_metadata_parameters: list[~flow.models.ParameterAssignment] :keyword module_output_settings: :paramtype module_output_settings: list[~flow.models.OutputSetting] :keyword module_input_settings: :paramtype module_input_settings: list[~flow.models.InputSetting] :keyword use_graph_default_compute: :paramtype use_graph_default_compute: bool :keyword use_graph_default_datastore: :paramtype use_graph_default_datastore: bool :keyword regenerate_output: :paramtype regenerate_output: bool :keyword control_inputs: :paramtype control_inputs: list[~flow.models.ControlInput] :keyword cloud_settings: :paramtype cloud_settings: ~flow.models.CloudSettings :keyword execution_phase: Possible values include: "Execution", "Initialization", "Finalization". 
:paramtype execution_phase: str or ~flow.models.ExecutionPhase :keyword run_attribution: :paramtype run_attribution: str """ super(GraphReferenceNode, self).__init__(**kwargs) self.graph_id = graph_id self.default_compute = default_compute self.default_datastore = default_datastore self.id = id self.module_id = module_id self.comment = comment self.name = name self.module_parameters = module_parameters self.module_metadata_parameters = module_metadata_parameters self.module_output_settings = module_output_settings self.module_input_settings = module_input_settings self.use_graph_default_compute = use_graph_default_compute self.use_graph_default_datastore = use_graph_default_datastore self.regenerate_output = regenerate_output self.control_inputs = control_inputs self.cloud_settings = cloud_settings self.execution_phase = execution_phase self.run_attribution = run_attribution class HdfsReference(msrest.serialization.Model): """HdfsReference. :ivar aml_data_store_name: :vartype aml_data_store_name: str :ivar relative_path: :vartype relative_path: str """ _attribute_map = { 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, } def __init__( self, *, aml_data_store_name: Optional[str] = None, relative_path: Optional[str] = None, **kwargs ): """ :keyword aml_data_store_name: :paramtype aml_data_store_name: str :keyword relative_path: :paramtype relative_path: str """ super(HdfsReference, self).__init__(**kwargs) self.aml_data_store_name = aml_data_store_name self.relative_path = relative_path class HdiClusterComputeInfo(msrest.serialization.Model): """HdiClusterComputeInfo. 
    :ivar address:
    :vartype address: str
    :ivar username:
    :vartype username: str
    :ivar password:
    :vartype password: str
    :ivar private_key:
    :vartype private_key: str
    """

    # msrest serialization map: python attribute name -> wire (JSON) key and type.
    # NOTE(review): password/private_key look like raw credentials travelling in
    # the payload — presumably sourced from a secret store; confirm they are
    # never logged by callers.
    _attribute_map = {
        'address': {'key': 'address', 'type': 'str'},
        'username': {'key': 'username', 'type': 'str'},
        'password': {'key': 'password', 'type': 'str'},
        'private_key': {'key': 'privateKey', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        address: Optional[str] = None,
        username: Optional[str] = None,
        password: Optional[str] = None,
        private_key: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword address:
        :paramtype address: str
        :keyword username:
        :paramtype username: str
        :keyword password:
        :paramtype password: str
        :keyword private_key:
        :paramtype private_key: str
        """
        # Pure value object: store every constructor argument unchanged.
        super(HdiClusterComputeInfo, self).__init__(**kwargs)
        self.address = address
        self.username = username
        self.password = password
        self.private_key = private_key


class HdiConfiguration(msrest.serialization.Model):
    """HdiConfiguration.

    Single-field model selecting the YARN deploy mode for an HDInsight run.

    :ivar yarn_deploy_mode: Possible values include: "None", "Client", "Cluster".
    :vartype yarn_deploy_mode: str or ~flow.models.YarnDeployMode
    """

    # msrest serialization map: python attribute name -> wire (JSON) key and type.
    _attribute_map = {
        'yarn_deploy_mode': {'key': 'yarnDeployMode', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        yarn_deploy_mode: Optional[Union[str, "YarnDeployMode"]] = None,
        **kwargs
    ):
        """
        :keyword yarn_deploy_mode: Possible values include: "None", "Client", "Cluster".
        :paramtype yarn_deploy_mode: str or ~flow.models.YarnDeployMode
        """
        super(HdiConfiguration, self).__init__(**kwargs)
        self.yarn_deploy_mode = yarn_deploy_mode


class HdiRunConfiguration(msrest.serialization.Model):
    """HdiRunConfiguration.
:ivar file: :vartype file: str :ivar class_name: :vartype class_name: str :ivar files: :vartype files: list[str] :ivar archives: :vartype archives: list[str] :ivar jars: :vartype jars: list[str] :ivar py_files: :vartype py_files: list[str] :ivar compute_name: :vartype compute_name: str :ivar queue: :vartype queue: str :ivar driver_memory: :vartype driver_memory: str :ivar driver_cores: :vartype driver_cores: int :ivar executor_memory: :vartype executor_memory: str :ivar executor_cores: :vartype executor_cores: int :ivar number_executors: :vartype number_executors: int :ivar conf: Dictionary of :code:`<string>`. :vartype conf: dict[str, str] :ivar name: :vartype name: str """ _attribute_map = { 'file': {'key': 'file', 'type': 'str'}, 'class_name': {'key': 'className', 'type': 'str'}, 'files': {'key': 'files', 'type': '[str]'}, 'archives': {'key': 'archives', 'type': '[str]'}, 'jars': {'key': 'jars', 'type': '[str]'}, 'py_files': {'key': 'pyFiles', 'type': '[str]'}, 'compute_name': {'key': 'computeName', 'type': 'str'}, 'queue': {'key': 'queue', 'type': 'str'}, 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, 'driver_cores': {'key': 'driverCores', 'type': 'int'}, 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, 'executor_cores': {'key': 'executorCores', 'type': 'int'}, 'number_executors': {'key': 'numberExecutors', 'type': 'int'}, 'conf': {'key': 'conf', 'type': '{str}'}, 'name': {'key': 'name', 'type': 'str'}, } def __init__( self, *, file: Optional[str] = None, class_name: Optional[str] = None, files: Optional[List[str]] = None, archives: Optional[List[str]] = None, jars: Optional[List[str]] = None, py_files: Optional[List[str]] = None, compute_name: Optional[str] = None, queue: Optional[str] = None, driver_memory: Optional[str] = None, driver_cores: Optional[int] = None, executor_memory: Optional[str] = None, executor_cores: Optional[int] = None, number_executors: Optional[int] = None, conf: Optional[Dict[str, str]] = None, name: Optional[str] 
= None, **kwargs ): """ :keyword file: :paramtype file: str :keyword class_name: :paramtype class_name: str :keyword files: :paramtype files: list[str] :keyword archives: :paramtype archives: list[str] :keyword jars: :paramtype jars: list[str] :keyword py_files: :paramtype py_files: list[str] :keyword compute_name: :paramtype compute_name: str :keyword queue: :paramtype queue: str :keyword driver_memory: :paramtype driver_memory: str :keyword driver_cores: :paramtype driver_cores: int :keyword executor_memory: :paramtype executor_memory: str :keyword executor_cores: :paramtype executor_cores: int :keyword number_executors: :paramtype number_executors: int :keyword conf: Dictionary of :code:`<string>`. :paramtype conf: dict[str, str] :keyword name: :paramtype name: str """ super(HdiRunConfiguration, self).__init__(**kwargs) self.file = file self.class_name = class_name self.files = files self.archives = archives self.jars = jars self.py_files = py_files self.compute_name = compute_name self.queue = queue self.driver_memory = driver_memory self.driver_cores = driver_cores self.executor_memory = executor_memory self.executor_cores = executor_cores self.number_executors = number_executors self.conf = conf self.name = name class HistoryConfiguration(msrest.serialization.Model): """HistoryConfiguration. 
:ivar output_collection: :vartype output_collection: bool :ivar directories_to_watch: :vartype directories_to_watch: list[str] :ivar enable_m_lflow_tracking: :vartype enable_m_lflow_tracking: bool """ _attribute_map = { 'output_collection': {'key': 'outputCollection', 'type': 'bool'}, 'directories_to_watch': {'key': 'directoriesToWatch', 'type': '[str]'}, 'enable_m_lflow_tracking': {'key': 'enableMLflowTracking', 'type': 'bool'}, } def __init__( self, *, output_collection: Optional[bool] = True, directories_to_watch: Optional[List[str]] = ['logs'], enable_m_lflow_tracking: Optional[bool] = True, **kwargs ): """ :keyword output_collection: :paramtype output_collection: bool :keyword directories_to_watch: :paramtype directories_to_watch: list[str] :keyword enable_m_lflow_tracking: :paramtype enable_m_lflow_tracking: bool """ super(HistoryConfiguration, self).__init__(**kwargs) self.output_collection = output_collection self.directories_to_watch = directories_to_watch self.enable_m_lflow_tracking = enable_m_lflow_tracking class HyperDriveConfiguration(msrest.serialization.Model): """HyperDriveConfiguration. 
:ivar hyper_drive_run_config: :vartype hyper_drive_run_config: str :ivar primary_metric_goal: :vartype primary_metric_goal: str :ivar primary_metric_name: :vartype primary_metric_name: str :ivar arguments: :vartype arguments: list[~flow.models.ArgumentAssignment] """ _attribute_map = { 'hyper_drive_run_config': {'key': 'hyperDriveRunConfig', 'type': 'str'}, 'primary_metric_goal': {'key': 'primaryMetricGoal', 'type': 'str'}, 'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'}, 'arguments': {'key': 'arguments', 'type': '[ArgumentAssignment]'}, } def __init__( self, *, hyper_drive_run_config: Optional[str] = None, primary_metric_goal: Optional[str] = None, primary_metric_name: Optional[str] = None, arguments: Optional[List["ArgumentAssignment"]] = None, **kwargs ): """ :keyword hyper_drive_run_config: :paramtype hyper_drive_run_config: str :keyword primary_metric_goal: :paramtype primary_metric_goal: str :keyword primary_metric_name: :paramtype primary_metric_name: str :keyword arguments: :paramtype arguments: list[~flow.models.ArgumentAssignment] """ super(HyperDriveConfiguration, self).__init__(**kwargs) self.hyper_drive_run_config = hyper_drive_run_config self.primary_metric_goal = primary_metric_goal self.primary_metric_name = primary_metric_name self.arguments = arguments class ICheckableLongRunningOperationResponse(msrest.serialization.Model): """ICheckableLongRunningOperationResponse. :ivar completion_result: Any object. :vartype completion_result: any :ivar location: :vartype location: str :ivar operation_result: :vartype operation_result: str """ _attribute_map = { 'completion_result': {'key': 'completionResult', 'type': 'object'}, 'location': {'key': 'location', 'type': 'str'}, 'operation_result': {'key': 'operationResult', 'type': 'str'}, } def __init__( self, *, completion_result: Optional[Any] = None, location: Optional[str] = None, operation_result: Optional[str] = None, **kwargs ): """ :keyword completion_result: Any object. 
:paramtype completion_result: any :keyword location: :paramtype location: str :keyword operation_result: :paramtype operation_result: str """ super(ICheckableLongRunningOperationResponse, self).__init__(**kwargs) self.completion_result = completion_result self.location = location self.operation_result = operation_result class IdentityConfiguration(msrest.serialization.Model): """IdentityConfiguration. :ivar type: Possible values include: "Managed", "ServicePrincipal", "AMLToken". :vartype type: str or ~flow.models.IdentityType :ivar properties: This is a dictionary. :vartype properties: dict[str, str] :ivar secret: :vartype secret: str """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'secret': {'key': 'secret', 'type': 'str'}, } def __init__( self, *, type: Optional[Union[str, "IdentityType"]] = None, properties: Optional[Dict[str, str]] = None, secret: Optional[str] = None, **kwargs ): """ :keyword type: Possible values include: "Managed", "ServicePrincipal", "AMLToken". :paramtype type: str or ~flow.models.IdentityType :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] :keyword secret: :paramtype secret: str """ super(IdentityConfiguration, self).__init__(**kwargs) self.type = type self.properties = properties self.secret = secret class IdentitySetting(msrest.serialization.Model): """IdentitySetting. :ivar type: Possible values include: "UserIdentity", "Managed", "AMLToken". 
:vartype type: str or ~flow.models.AEVAIdentityType :ivar client_id: :vartype client_id: str :ivar object_id: :vartype object_id: str :ivar msi_resource_id: :vartype msi_resource_id: str """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'client_id': {'key': 'clientId', 'type': 'str'}, 'object_id': {'key': 'objectId', 'type': 'str'}, 'msi_resource_id': {'key': 'msiResourceId', 'type': 'str'}, } def __init__( self, *, type: Optional[Union[str, "AEVAIdentityType"]] = None, client_id: Optional[str] = None, object_id: Optional[str] = None, msi_resource_id: Optional[str] = None, **kwargs ): """ :keyword type: Possible values include: "UserIdentity", "Managed", "AMLToken". :paramtype type: str or ~flow.models.AEVAIdentityType :keyword client_id: :paramtype client_id: str :keyword object_id: :paramtype object_id: str :keyword msi_resource_id: :paramtype msi_resource_id: str """ super(IdentitySetting, self).__init__(**kwargs) self.type = type self.client_id = client_id self.object_id = object_id self.msi_resource_id = msi_resource_id class ImportDataTask(msrest.serialization.Model): """ImportDataTask. :ivar data_transfer_source: :vartype data_transfer_source: ~flow.models.DataTransferSource """ _attribute_map = { 'data_transfer_source': {'key': 'DataTransferSource', 'type': 'DataTransferSource'}, } def __init__( self, *, data_transfer_source: Optional["DataTransferSource"] = None, **kwargs ): """ :keyword data_transfer_source: :paramtype data_transfer_source: ~flow.models.DataTransferSource """ super(ImportDataTask, self).__init__(**kwargs) self.data_transfer_source = data_transfer_source class IndexedErrorResponse(msrest.serialization.Model): """IndexedErrorResponse. 
:ivar code: :vartype code: str :ivar error_code_hierarchy: :vartype error_code_hierarchy: str :ivar message: :vartype message: str :ivar time: :vartype time: ~datetime.datetime :ivar component_name: :vartype component_name: str :ivar severity: :vartype severity: int :ivar details_uri: :vartype details_uri: str :ivar reference_code: :vartype reference_code: str """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'error_code_hierarchy': {'key': 'errorCodeHierarchy', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'time': {'key': 'time', 'type': 'iso-8601'}, 'component_name': {'key': 'componentName', 'type': 'str'}, 'severity': {'key': 'severity', 'type': 'int'}, 'details_uri': {'key': 'detailsUri', 'type': 'str'}, 'reference_code': {'key': 'referenceCode', 'type': 'str'}, } def __init__( self, *, code: Optional[str] = None, error_code_hierarchy: Optional[str] = None, message: Optional[str] = None, time: Optional[datetime.datetime] = None, component_name: Optional[str] = None, severity: Optional[int] = None, details_uri: Optional[str] = None, reference_code: Optional[str] = None, **kwargs ): """ :keyword code: :paramtype code: str :keyword error_code_hierarchy: :paramtype error_code_hierarchy: str :keyword message: :paramtype message: str :keyword time: :paramtype time: ~datetime.datetime :keyword component_name: :paramtype component_name: str :keyword severity: :paramtype severity: int :keyword details_uri: :paramtype details_uri: str :keyword reference_code: :paramtype reference_code: str """ super(IndexedErrorResponse, self).__init__(**kwargs) self.code = code self.error_code_hierarchy = error_code_hierarchy self.message = message self.time = time self.component_name = component_name self.severity = severity self.details_uri = details_uri self.reference_code = reference_code class InitScriptInfoDto(msrest.serialization.Model): """InitScriptInfoDto. 
:ivar dbfs: :vartype dbfs: ~flow.models.DbfsStorageInfoDto """ _attribute_map = { 'dbfs': {'key': 'dbfs', 'type': 'DbfsStorageInfoDto'}, } def __init__( self, *, dbfs: Optional["DbfsStorageInfoDto"] = None, **kwargs ): """ :keyword dbfs: :paramtype dbfs: ~flow.models.DbfsStorageInfoDto """ super(InitScriptInfoDto, self).__init__(**kwargs) self.dbfs = dbfs class InnerErrorDetails(msrest.serialization.Model): """InnerErrorDetails. :ivar code: :vartype code: str :ivar message: :vartype message: str :ivar target: :vartype target: str """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, } def __init__( self, *, code: Optional[str] = None, message: Optional[str] = None, target: Optional[str] = None, **kwargs ): """ :keyword code: :paramtype code: str :keyword message: :paramtype message: str :keyword target: :paramtype target: str """ super(InnerErrorDetails, self).__init__(**kwargs) self.code = code self.message = message self.target = target class InnerErrorResponse(msrest.serialization.Model): """A nested structure of errors. :ivar code: The error code. :vartype code: str :ivar inner_error: A nested structure of errors. :vartype inner_error: ~flow.models.InnerErrorResponse """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'}, } def __init__( self, *, code: Optional[str] = None, inner_error: Optional["InnerErrorResponse"] = None, **kwargs ): """ :keyword code: The error code. :paramtype code: str :keyword inner_error: A nested structure of errors. :paramtype inner_error: ~flow.models.InnerErrorResponse """ super(InnerErrorResponse, self).__init__(**kwargs) self.code = code self.inner_error = inner_error class InputAsset(msrest.serialization.Model): """InputAsset. :ivar asset: :vartype asset: ~flow.models.Asset :ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs". 
:vartype mechanism: str or ~flow.models.DeliveryMechanism :ivar environment_variable_name: :vartype environment_variable_name: str :ivar path_on_compute: :vartype path_on_compute: str :ivar overwrite: :vartype overwrite: bool :ivar options: Dictionary of :code:`<string>`. :vartype options: dict[str, str] """ _attribute_map = { 'asset': {'key': 'asset', 'type': 'Asset'}, 'mechanism': {'key': 'mechanism', 'type': 'str'}, 'environment_variable_name': {'key': 'environmentVariableName', 'type': 'str'}, 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, 'overwrite': {'key': 'overwrite', 'type': 'bool'}, 'options': {'key': 'options', 'type': '{str}'}, } def __init__( self, *, asset: Optional["Asset"] = None, mechanism: Optional[Union[str, "DeliveryMechanism"]] = None, environment_variable_name: Optional[str] = None, path_on_compute: Optional[str] = None, overwrite: Optional[bool] = None, options: Optional[Dict[str, str]] = None, **kwargs ): """ :keyword asset: :paramtype asset: ~flow.models.Asset :keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs". :paramtype mechanism: str or ~flow.models.DeliveryMechanism :keyword environment_variable_name: :paramtype environment_variable_name: str :keyword path_on_compute: :paramtype path_on_compute: str :keyword overwrite: :paramtype overwrite: bool :keyword options: Dictionary of :code:`<string>`. :paramtype options: dict[str, str] """ super(InputAsset, self).__init__(**kwargs) self.asset = asset self.mechanism = mechanism self.environment_variable_name = environment_variable_name self.path_on_compute = path_on_compute self.overwrite = overwrite self.options = options class InputData(msrest.serialization.Model): """InputData. :ivar dataset_id: :vartype dataset_id: str :ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount", "ReadWriteMount", "Direct", "EvalMount", "EvalDownload". 
:vartype mode: str or ~flow.models.DataBindingMode :ivar value: :vartype value: str """ _attribute_map = { 'dataset_id': {'key': 'datasetId', 'type': 'str'}, 'mode': {'key': 'mode', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, } def __init__( self, *, dataset_id: Optional[str] = None, mode: Optional[Union[str, "DataBindingMode"]] = None, value: Optional[str] = None, **kwargs ): """ :keyword dataset_id: :paramtype dataset_id: str :keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount", "ReadWriteMount", "Direct", "EvalMount", "EvalDownload". :paramtype mode: str or ~flow.models.DataBindingMode :keyword value: :paramtype value: str """ super(InputData, self).__init__(**kwargs) self.dataset_id = dataset_id self.mode = mode self.value = value class InputDataBinding(msrest.serialization.Model): """InputDataBinding. :ivar data_id: :vartype data_id: str :ivar path_on_compute: :vartype path_on_compute: str :ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount", "ReadWriteMount", "Direct", "EvalMount", "EvalDownload". :vartype mode: str or ~flow.models.DataBindingMode :ivar description: :vartype description: str :ivar uri: :vartype uri: ~flow.models.MfeInternalUriReference :ivar value: :vartype value: str :ivar asset_uri: :vartype asset_uri: str :ivar job_input_type: Possible values include: "Dataset", "Uri", "Literal", "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel". 
:vartype job_input_type: str or ~flow.models.JobInputType """ _attribute_map = { 'data_id': {'key': 'dataId', 'type': 'str'}, 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, 'mode': {'key': 'mode', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'uri': {'key': 'uri', 'type': 'MfeInternalUriReference'}, 'value': {'key': 'value', 'type': 'str'}, 'asset_uri': {'key': 'assetUri', 'type': 'str'}, 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, } def __init__( self, *, data_id: Optional[str] = None, path_on_compute: Optional[str] = None, mode: Optional[Union[str, "DataBindingMode"]] = None, description: Optional[str] = None, uri: Optional["MfeInternalUriReference"] = None, value: Optional[str] = None, asset_uri: Optional[str] = None, job_input_type: Optional[Union[str, "JobInputType"]] = None, **kwargs ): """ :keyword data_id: :paramtype data_id: str :keyword path_on_compute: :paramtype path_on_compute: str :keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount", "ReadWriteMount", "Direct", "EvalMount", "EvalDownload". :paramtype mode: str or ~flow.models.DataBindingMode :keyword description: :paramtype description: str :keyword uri: :paramtype uri: ~flow.models.MfeInternalUriReference :keyword value: :paramtype value: str :keyword asset_uri: :paramtype asset_uri: str :keyword job_input_type: Possible values include: "Dataset", "Uri", "Literal", "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel". :paramtype job_input_type: str or ~flow.models.JobInputType """ super(InputDataBinding, self).__init__(**kwargs) self.data_id = data_id self.path_on_compute = path_on_compute self.mode = mode self.description = description self.uri = uri self.value = value self.asset_uri = asset_uri self.job_input_type = job_input_type class InputDefinition(msrest.serialization.Model): """InputDefinition. 
:ivar name: :vartype name: str :ivar type: :vartype type: list[str or ~flow.models.ValueType] :ivar default: Anything. :vartype default: any :ivar description: :vartype description: str :ivar enum: :vartype enum: list[str] :ivar enabled_by: :vartype enabled_by: str :ivar enabled_by_type: :vartype enabled_by_type: list[str or ~flow.models.ValueType] :ivar enabled_by_value: :vartype enabled_by_value: list[any] :ivar model_list: :vartype model_list: list[str] :ivar capabilities: :vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities :ivar dynamic_list: :vartype dynamic_list: ~flow.models.ToolInputDynamicList :ivar allow_manual_entry: :vartype allow_manual_entry: bool :ivar is_multi_select: :vartype is_multi_select: bool :ivar generated_by: :vartype generated_by: ~flow.models.ToolInputGeneratedBy :ivar input_type: Possible values include: "default", "uionly_hidden". :vartype input_type: str or ~flow.models.InputType :ivar advanced: :vartype advanced: bool :ivar ui_hints: This is a dictionary. 
:vartype ui_hints: dict[str, any] """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': '[str]'}, 'default': {'key': 'default', 'type': 'object'}, 'description': {'key': 'description', 'type': 'str'}, 'enum': {'key': 'enum', 'type': '[str]'}, 'enabled_by': {'key': 'enabled_by', 'type': 'str'}, 'enabled_by_type': {'key': 'enabled_by_type', 'type': '[str]'}, 'enabled_by_value': {'key': 'enabled_by_value', 'type': '[object]'}, 'model_list': {'key': 'model_list', 'type': '[str]'}, 'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'}, 'dynamic_list': {'key': 'dynamic_list', 'type': 'ToolInputDynamicList'}, 'allow_manual_entry': {'key': 'allow_manual_entry', 'type': 'bool'}, 'is_multi_select': {'key': 'is_multi_select', 'type': 'bool'}, 'generated_by': {'key': 'generated_by', 'type': 'ToolInputGeneratedBy'}, 'input_type': {'key': 'input_type', 'type': 'str'}, 'advanced': {'key': 'advanced', 'type': 'bool'}, 'ui_hints': {'key': 'ui_hints', 'type': '{object}'}, } def __init__( self, *, name: Optional[str] = None, type: Optional[List[Union[str, "ValueType"]]] = None, default: Optional[Any] = None, description: Optional[str] = None, enum: Optional[List[str]] = None, enabled_by: Optional[str] = None, enabled_by_type: Optional[List[Union[str, "ValueType"]]] = None, enabled_by_value: Optional[List[Any]] = None, model_list: Optional[List[str]] = None, capabilities: Optional["AzureOpenAIModelCapabilities"] = None, dynamic_list: Optional["ToolInputDynamicList"] = None, allow_manual_entry: Optional[bool] = None, is_multi_select: Optional[bool] = None, generated_by: Optional["ToolInputGeneratedBy"] = None, input_type: Optional[Union[str, "InputType"]] = None, advanced: Optional[bool] = None, ui_hints: Optional[Dict[str, Any]] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword type: :paramtype type: list[str or ~flow.models.ValueType] :keyword default: Anything. 
:paramtype default: any :keyword description: :paramtype description: str :keyword enum: :paramtype enum: list[str] :keyword enabled_by: :paramtype enabled_by: str :keyword enabled_by_type: :paramtype enabled_by_type: list[str or ~flow.models.ValueType] :keyword enabled_by_value: :paramtype enabled_by_value: list[any] :keyword model_list: :paramtype model_list: list[str] :keyword capabilities: :paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities :keyword dynamic_list: :paramtype dynamic_list: ~flow.models.ToolInputDynamicList :keyword allow_manual_entry: :paramtype allow_manual_entry: bool :keyword is_multi_select: :paramtype is_multi_select: bool :keyword generated_by: :paramtype generated_by: ~flow.models.ToolInputGeneratedBy :keyword input_type: Possible values include: "default", "uionly_hidden". :paramtype input_type: str or ~flow.models.InputType :keyword advanced: :paramtype advanced: bool :keyword ui_hints: This is a dictionary. :paramtype ui_hints: dict[str, any] """ super(InputDefinition, self).__init__(**kwargs) self.name = name self.type = type self.default = default self.description = description self.enum = enum self.enabled_by = enabled_by self.enabled_by_type = enabled_by_type self.enabled_by_value = enabled_by_value self.model_list = model_list self.capabilities = capabilities self.dynamic_list = dynamic_list self.allow_manual_entry = allow_manual_entry self.is_multi_select = is_multi_select self.generated_by = generated_by self.input_type = input_type self.advanced = advanced self.ui_hints = ui_hints class InputOutputPortMetadata(msrest.serialization.Model): """InputOutputPortMetadata. Variables are only populated by the server, and will be ignored when sending a request. 
    :ivar graph_module_node_id:
    :vartype graph_module_node_id: str
    :ivar port_name:
    :vartype port_name: str
    :ivar schema:
    :vartype schema: str
    :ivar name:
    :vartype name: str
    :ivar id:
    :vartype id: str
    """

    # 'id' is read-only: per the class docstring it is populated by the server
    # and is ignored when sending a request (note __init__ takes no 'id').
    _validation = {
        'id': {'readonly': True},
    }

    # Maps Python attribute names to their camelCase REST wire keys and
    # msrest serialization types.
    _attribute_map = {
        'graph_module_node_id': {'key': 'graphModuleNodeId', 'type': 'str'},
        'port_name': {'key': 'portName', 'type': 'str'},
        'schema': {'key': 'schema', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        graph_module_node_id: Optional[str] = None,
        port_name: Optional[str] = None,
        schema: Optional[str] = None,
        name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword graph_module_node_id:
        :paramtype graph_module_node_id: str
        :keyword port_name:
        :paramtype port_name: str
        :keyword schema:
        :paramtype schema: str
        :keyword name:
        :paramtype name: str
        """
        super(InputOutputPortMetadata, self).__init__(**kwargs)
        self.graph_module_node_id = graph_module_node_id
        self.port_name = port_name
        self.schema = schema
        self.name = name
        # Server-populated read-only field; deliberately initialized to None.
        self.id = None


class InputSetting(msrest.serialization.Model):
    """InputSetting.

    :ivar name:
    :vartype name: str
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar options: This is a dictionary.
:vartype options: dict[str, str] :ivar additional_transformations: :vartype additional_transformations: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'}, 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, 'options': {'key': 'options', 'type': '{str}'}, 'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, data_store_mode: Optional[Union[str, "AEVADataStoreMode"]] = None, path_on_compute: Optional[str] = None, options: Optional[Dict[str, str]] = None, additional_transformations: Optional[str] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". :paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode :keyword path_on_compute: :paramtype path_on_compute: str :keyword options: This is a dictionary. :paramtype options: dict[str, str] :keyword additional_transformations: :paramtype additional_transformations: str """ super(InputSetting, self).__init__(**kwargs) self.name = name self.data_store_mode = data_store_mode self.path_on_compute = path_on_compute self.options = options self.additional_transformations = additional_transformations class IntellectualPropertyPublisherInformation(msrest.serialization.Model): """IntellectualPropertyPublisherInformation. 
    :ivar intellectual_property_publisher:
    :vartype intellectual_property_publisher: str
    """

    # Single-field wrapper: Python attribute -> camelCase REST wire key.
    _attribute_map = {
        'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        intellectual_property_publisher: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword intellectual_property_publisher:
        :paramtype intellectual_property_publisher: str
        """
        super(IntellectualPropertyPublisherInformation, self).__init__(**kwargs)
        self.intellectual_property_publisher = intellectual_property_publisher


class InteractiveConfig(msrest.serialization.Model):
    """InteractiveConfig.

    :ivar is_ssh_enabled:
    :vartype is_ssh_enabled: bool
    :ivar ssh_public_key:
    :vartype ssh_public_key: str
    :ivar is_i_python_enabled:
    :vartype is_i_python_enabled: bool
    :ivar is_tensor_board_enabled:
    :vartype is_tensor_board_enabled: bool
    :ivar interactive_port:
    :vartype interactive_port: int
    """

    # Wire keys use the service's mixed-acronym casing ('isSSHEnabled',
    # 'isIPythonEnabled') and must not be normalized.
    _attribute_map = {
        'is_ssh_enabled': {'key': 'isSSHEnabled', 'type': 'bool'},
        'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
        'is_i_python_enabled': {'key': 'isIPythonEnabled', 'type': 'bool'},
        'is_tensor_board_enabled': {'key': 'isTensorBoardEnabled', 'type': 'bool'},
        'interactive_port': {'key': 'interactivePort', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        is_ssh_enabled: Optional[bool] = None,
        ssh_public_key: Optional[str] = None,
        is_i_python_enabled: Optional[bool] = None,
        is_tensor_board_enabled: Optional[bool] = None,
        interactive_port: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword is_ssh_enabled:
        :paramtype is_ssh_enabled: bool
        :keyword ssh_public_key:
        :paramtype ssh_public_key: str
        :keyword is_i_python_enabled:
        :paramtype is_i_python_enabled: bool
        :keyword is_tensor_board_enabled:
        :paramtype is_tensor_board_enabled: bool
        :keyword interactive_port:
        :paramtype interactive_port: int
        """
        super(InteractiveConfig, self).__init__(**kwargs)
        self.is_ssh_enabled = is_ssh_enabled
        self.ssh_public_key = ssh_public_key
        self.is_i_python_enabled = is_i_python_enabled
        self.is_tensor_board_enabled = is_tensor_board_enabled
        self.interactive_port = interactive_port


# NOTE(review): field-for-field mirror of InteractiveConfig above (identical
# attributes and wire keys); generated as a distinct schema type, so both
# must be kept.
class InteractiveConfiguration(msrest.serialization.Model):
    """InteractiveConfiguration.

    :ivar is_ssh_enabled:
    :vartype is_ssh_enabled: bool
    :ivar ssh_public_key:
    :vartype ssh_public_key: str
    :ivar is_i_python_enabled:
    :vartype is_i_python_enabled: bool
    :ivar is_tensor_board_enabled:
    :vartype is_tensor_board_enabled: bool
    :ivar interactive_port:
    :vartype interactive_port: int
    """

    _attribute_map = {
        'is_ssh_enabled': {'key': 'isSSHEnabled', 'type': 'bool'},
        'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
        'is_i_python_enabled': {'key': 'isIPythonEnabled', 'type': 'bool'},
        'is_tensor_board_enabled': {'key': 'isTensorBoardEnabled', 'type': 'bool'},
        'interactive_port': {'key': 'interactivePort', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        is_ssh_enabled: Optional[bool] = None,
        ssh_public_key: Optional[str] = None,
        is_i_python_enabled: Optional[bool] = None,
        is_tensor_board_enabled: Optional[bool] = None,
        interactive_port: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword is_ssh_enabled:
        :paramtype is_ssh_enabled: bool
        :keyword ssh_public_key:
        :paramtype ssh_public_key: str
        :keyword is_i_python_enabled:
        :paramtype is_i_python_enabled: bool
        :keyword is_tensor_board_enabled:
        :paramtype is_tensor_board_enabled: bool
        :keyword interactive_port:
        :paramtype interactive_port: int
        """
        super(InteractiveConfiguration, self).__init__(**kwargs)
        self.is_ssh_enabled = is_ssh_enabled
        self.ssh_public_key = ssh_public_key
        self.is_i_python_enabled = is_i_python_enabled
        self.is_tensor_board_enabled = is_tensor_board_enabled
        self.interactive_port = interactive_port


class JobCost(msrest.serialization.Model):
    """JobCost.
:ivar charged_cpu_core_seconds: :vartype charged_cpu_core_seconds: float :ivar charged_cpu_memory_megabyte_seconds: :vartype charged_cpu_memory_megabyte_seconds: float :ivar charged_gpu_seconds: :vartype charged_gpu_seconds: float :ivar charged_node_utilization_seconds: :vartype charged_node_utilization_seconds: float """ _attribute_map = { 'charged_cpu_core_seconds': {'key': 'chargedCpuCoreSeconds', 'type': 'float'}, 'charged_cpu_memory_megabyte_seconds': {'key': 'chargedCpuMemoryMegabyteSeconds', 'type': 'float'}, 'charged_gpu_seconds': {'key': 'chargedGpuSeconds', 'type': 'float'}, 'charged_node_utilization_seconds': {'key': 'chargedNodeUtilizationSeconds', 'type': 'float'}, } def __init__( self, *, charged_cpu_core_seconds: Optional[float] = None, charged_cpu_memory_megabyte_seconds: Optional[float] = None, charged_gpu_seconds: Optional[float] = None, charged_node_utilization_seconds: Optional[float] = None, **kwargs ): """ :keyword charged_cpu_core_seconds: :paramtype charged_cpu_core_seconds: float :keyword charged_cpu_memory_megabyte_seconds: :paramtype charged_cpu_memory_megabyte_seconds: float :keyword charged_gpu_seconds: :paramtype charged_gpu_seconds: float :keyword charged_node_utilization_seconds: :paramtype charged_node_utilization_seconds: float """ super(JobCost, self).__init__(**kwargs) self.charged_cpu_core_seconds = charged_cpu_core_seconds self.charged_cpu_memory_megabyte_seconds = charged_cpu_memory_megabyte_seconds self.charged_gpu_seconds = charged_gpu_seconds self.charged_node_utilization_seconds = charged_node_utilization_seconds class JobEndpoint(msrest.serialization.Model): """JobEndpoint. :ivar type: :vartype type: str :ivar port: :vartype port: int :ivar endpoint: :vartype endpoint: str :ivar status: :vartype status: str :ivar error_message: :vartype error_message: str :ivar properties: This is a dictionary. 
:vartype properties: dict[str, str] :ivar nodes: :vartype nodes: ~flow.models.MfeInternalNodes """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'port': {'key': 'port', 'type': 'int'}, 'endpoint': {'key': 'endpoint', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'error_message': {'key': 'errorMessage', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'nodes': {'key': 'nodes', 'type': 'MfeInternalNodes'}, } def __init__( self, *, type: Optional[str] = None, port: Optional[int] = None, endpoint: Optional[str] = None, status: Optional[str] = None, error_message: Optional[str] = None, properties: Optional[Dict[str, str]] = None, nodes: Optional["MfeInternalNodes"] = None, **kwargs ): """ :keyword type: :paramtype type: str :keyword port: :paramtype port: int :keyword endpoint: :paramtype endpoint: str :keyword status: :paramtype status: str :keyword error_message: :paramtype error_message: str :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] :keyword nodes: :paramtype nodes: ~flow.models.MfeInternalNodes """ super(JobEndpoint, self).__init__(**kwargs) self.type = type self.port = port self.endpoint = endpoint self.status = status self.error_message = error_message self.properties = properties self.nodes = nodes class JobInput(msrest.serialization.Model): """JobInput. All required parameters must be populated in order to send to Azure. :ivar job_input_type: Required. Possible values include: "Dataset", "Uri", "Literal", "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel". 
:vartype job_input_type: str or ~flow.models.JobInputType :ivar description: :vartype description: str """ _validation = { 'job_input_type': {'required': True}, } _attribute_map = { 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, } def __init__( self, *, job_input_type: Union[str, "JobInputType"], description: Optional[str] = None, **kwargs ): """ :keyword job_input_type: Required. Possible values include: "Dataset", "Uri", "Literal", "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel". :paramtype job_input_type: str or ~flow.models.JobInputType :keyword description: :paramtype description: str """ super(JobInput, self).__init__(**kwargs) self.job_input_type = job_input_type self.description = description class JobOutput(msrest.serialization.Model): """JobOutput. All required parameters must be populated in order to send to Azure. :ivar job_output_type: Required. Possible values include: "Uri", "Dataset", "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel". :vartype job_output_type: str or ~flow.models.JobOutputType :ivar description: :vartype description: str :ivar auto_delete_setting: :vartype auto_delete_setting: ~flow.models.AutoDeleteSetting """ _validation = { 'job_output_type': {'required': True}, } _attribute_map = { 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, } def __init__( self, *, job_output_type: Union[str, "JobOutputType"], description: Optional[str] = None, auto_delete_setting: Optional["AutoDeleteSetting"] = None, **kwargs ): """ :keyword job_output_type: Required. Possible values include: "Uri", "Dataset", "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel". 
        :paramtype job_output_type: str or ~flow.models.JobOutputType
        :keyword description:
        :paramtype description: str
        :keyword auto_delete_setting:
        :paramtype auto_delete_setting: ~flow.models.AutoDeleteSetting
        """
        super(JobOutput, self).__init__(**kwargs)
        self.job_output_type = job_output_type
        self.description = description
        self.auto_delete_setting = auto_delete_setting


class JobOutputArtifacts(msrest.serialization.Model):
    """JobOutputArtifacts.

    Output-artifact location expressed as a datastore id plus a path.

    :ivar datastore_id:
    :vartype datastore_id: str
    :ivar path:
    :vartype path: str
    """

    _attribute_map = {
        'datastore_id': {'key': 'datastoreId', 'type': 'str'},
        'path': {'key': 'path', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        datastore_id: Optional[str] = None,
        path: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword datastore_id:
        :paramtype datastore_id: str
        :keyword path:
        :paramtype path: str
        """
        super(JobOutputArtifacts, self).__init__(**kwargs)
        self.datastore_id = datastore_id
        self.path = path


class JobScheduleDto(msrest.serialization.Model):
    """JobScheduleDto.

    :ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
     "AutoML", "Spark", "Base".
    :vartype job_type: str or ~flow.models.JobType
    :ivar system_data:
    :vartype system_data: ~flow.models.SystemData
    :ivar name:
    :vartype name: str
    :ivar job_definition_id:
    :vartype job_definition_id: str
    :ivar display_name:
    :vartype display_name: str
    :ivar trigger_type: Possible values include: "Recurrence", "Cron".
    :vartype trigger_type: str or ~flow.models.TriggerType
    :ivar recurrence:
    :vartype recurrence: ~flow.models.Recurrence
    :ivar cron:
    :vartype cron: ~flow.models.Cron
    :ivar status: Possible values include: "Enabled", "Disabled".
    :vartype status: str or ~flow.models.ScheduleStatus
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
:vartype properties: dict[str, str] """ _attribute_map = { 'job_type': {'key': 'jobType', 'type': 'str'}, 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'name': {'key': 'name', 'type': 'str'}, 'job_definition_id': {'key': 'jobDefinitionId', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'trigger_type': {'key': 'triggerType', 'type': 'str'}, 'recurrence': {'key': 'recurrence', 'type': 'Recurrence'}, 'cron': {'key': 'cron', 'type': 'Cron'}, 'status': {'key': 'status', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'properties': {'key': 'properties', 'type': '{str}'}, } def __init__( self, *, job_type: Optional[Union[str, "JobType"]] = None, system_data: Optional["SystemData"] = None, name: Optional[str] = None, job_definition_id: Optional[str] = None, display_name: Optional[str] = None, trigger_type: Optional[Union[str, "TriggerType"]] = None, recurrence: Optional["Recurrence"] = None, cron: Optional["Cron"] = None, status: Optional[Union[str, "ScheduleStatus"]] = None, description: Optional[str] = None, tags: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, **kwargs ): """ :keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML", "Spark", "Base". :paramtype job_type: str or ~flow.models.JobType :keyword system_data: :paramtype system_data: ~flow.models.SystemData :keyword name: :paramtype name: str :keyword job_definition_id: :paramtype job_definition_id: str :keyword display_name: :paramtype display_name: str :keyword trigger_type: Possible values include: "Recurrence", "Cron". :paramtype trigger_type: str or ~flow.models.TriggerType :keyword recurrence: :paramtype recurrence: ~flow.models.Recurrence :keyword cron: :paramtype cron: ~flow.models.Cron :keyword status: Possible values include: "Enabled", "Disabled". 
:paramtype status: str or ~flow.models.ScheduleStatus :keyword description: :paramtype description: str :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] """ super(JobScheduleDto, self).__init__(**kwargs) self.job_type = job_type self.system_data = system_data self.name = name self.job_definition_id = job_definition_id self.display_name = display_name self.trigger_type = trigger_type self.recurrence = recurrence self.cron = cron self.status = status self.description = description self.tags = tags self.properties = properties class K8SConfiguration(msrest.serialization.Model): """K8SConfiguration. :ivar max_retry_count: :vartype max_retry_count: int :ivar resource_configuration: :vartype resource_configuration: ~flow.models.ResourceConfig :ivar priority_configuration: :vartype priority_configuration: ~flow.models.PriorityConfig :ivar interactive_configuration: :vartype interactive_configuration: ~flow.models.InteractiveConfig """ _attribute_map = { 'max_retry_count': {'key': 'maxRetryCount', 'type': 'int'}, 'resource_configuration': {'key': 'resourceConfiguration', 'type': 'ResourceConfig'}, 'priority_configuration': {'key': 'priorityConfiguration', 'type': 'PriorityConfig'}, 'interactive_configuration': {'key': 'interactiveConfiguration', 'type': 'InteractiveConfig'}, } def __init__( self, *, max_retry_count: Optional[int] = None, resource_configuration: Optional["ResourceConfig"] = None, priority_configuration: Optional["PriorityConfig"] = None, interactive_configuration: Optional["InteractiveConfig"] = None, **kwargs ): """ :keyword max_retry_count: :paramtype max_retry_count: int :keyword resource_configuration: :paramtype resource_configuration: ~flow.models.ResourceConfig :keyword priority_configuration: :paramtype priority_configuration: ~flow.models.PriorityConfig :keyword interactive_configuration: :paramtype interactive_configuration: 
         ~flow.models.InteractiveConfig
        """
        super(K8SConfiguration, self).__init__(**kwargs)
        self.max_retry_count = max_retry_count
        self.resource_configuration = resource_configuration
        self.priority_configuration = priority_configuration
        self.interactive_configuration = interactive_configuration


class KeyValuePairComponentNameMetaInfoErrorResponse(msrest.serialization.Model):
    """KeyValuePairComponentNameMetaInfoErrorResponse.

    A single (component-name meta-info, error response) pair.

    :ivar key:
    :vartype key: ~flow.models.ComponentNameMetaInfo
    :ivar value: The error response.
    :vartype value: ~flow.models.ErrorResponse
    """

    _attribute_map = {
        'key': {'key': 'key', 'type': 'ComponentNameMetaInfo'},
        'value': {'key': 'value', 'type': 'ErrorResponse'},
    }

    def __init__(
        self,
        *,
        key: Optional["ComponentNameMetaInfo"] = None,
        value: Optional["ErrorResponse"] = None,
        **kwargs
    ):
        """
        :keyword key:
        :paramtype key: ~flow.models.ComponentNameMetaInfo
        :keyword value: The error response.
        :paramtype value: ~flow.models.ErrorResponse
        """
        super(KeyValuePairComponentNameMetaInfoErrorResponse, self).__init__(**kwargs)
        self.key = key
        self.value = value


class KeyValuePairComponentNameMetaInfoModuleDto(msrest.serialization.Model):
    """KeyValuePairComponentNameMetaInfoModuleDto.

    A single (component-name meta-info, module DTO) pair.

    :ivar key:
    :vartype key: ~flow.models.ComponentNameMetaInfo
    :ivar value:
    :vartype value: ~flow.models.ModuleDto
    """

    _attribute_map = {
        'key': {'key': 'key', 'type': 'ComponentNameMetaInfo'},
        'value': {'key': 'value', 'type': 'ModuleDto'},
    }

    def __init__(
        self,
        *,
        key: Optional["ComponentNameMetaInfo"] = None,
        value: Optional["ModuleDto"] = None,
        **kwargs
    ):
        """
        :keyword key:
        :paramtype key: ~flow.models.ComponentNameMetaInfo
        :keyword value:
        :paramtype value: ~flow.models.ModuleDto
        """
        super(KeyValuePairComponentNameMetaInfoModuleDto, self).__init__(**kwargs)
        self.key = key
        self.value = value


class KeyValuePairStringObject(msrest.serialization.Model):
    """KeyValuePairStringObject.

    :ivar key:
    :vartype key: str
    :ivar value: Anything.
    :vartype value: any
    """

    _attribute_map = {
        'key': {'key': 'key', 'type': 'str'},
        # 'object' => untyped payload (see docstring: "Anything").
        'value': {'key': 'value', 'type': 'object'},
    }

    def __init__(
        self,
        *,
        key: Optional[str] = None,
        value: Optional[Any] = None,
        **kwargs
    ):
        """
        :keyword key:
        :paramtype key: str
        :keyword value: Anything.
        :paramtype value: any
        """
        super(KeyValuePairStringObject, self).__init__(**kwargs)
        self.key = key
        self.value = value


class KubernetesConfiguration(msrest.serialization.Model):
    """KubernetesConfiguration.

    :ivar instance_type:
    :vartype instance_type: str
    """

    _attribute_map = {
        'instance_type': {'key': 'instanceType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        instance_type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword instance_type:
        :paramtype instance_type: str
        """
        super(KubernetesConfiguration, self).__init__(**kwargs)
        self.instance_type = instance_type


class Kwarg(msrest.serialization.Model):
    """Kwarg.

    A single name/value string pair (keyword-argument style).

    :ivar key:
    :vartype key: str
    :ivar value:
    :vartype value: str
    """

    _attribute_map = {
        'key': {'key': 'key', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        key: Optional[str] = None,
        value: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword key:
        :paramtype key: str
        :keyword value:
        :paramtype value: str
        """
        super(Kwarg, self).__init__(**kwargs)
        self.key = key
        self.value = value


class LegacyDataPath(msrest.serialization.Model):
    """LegacyDataPath.

    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode :ivar relative_path: :vartype relative_path: str """ _attribute_map = { 'data_store_name': {'key': 'dataStoreName', 'type': 'str'}, 'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, } def __init__( self, *, data_store_name: Optional[str] = None, data_store_mode: Optional[Union[str, "AEVADataStoreMode"]] = None, relative_path: Optional[str] = None, **kwargs ): """ :keyword data_store_name: :paramtype data_store_name: str :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". :paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode :keyword relative_path: :paramtype relative_path: str """ super(LegacyDataPath, self).__init__(**kwargs) self.data_store_name = data_store_name self.data_store_mode = data_store_mode self.relative_path = relative_path class LimitSettings(msrest.serialization.Model): """LimitSettings. 
:ivar max_trials: :vartype max_trials: int :ivar timeout: :vartype timeout: str :ivar trial_timeout: :vartype trial_timeout: str :ivar max_concurrent_trials: :vartype max_concurrent_trials: int :ivar max_cores_per_trial: :vartype max_cores_per_trial: int :ivar exit_score: :vartype exit_score: float :ivar enable_early_termination: :vartype enable_early_termination: bool :ivar max_nodes: :vartype max_nodes: int """ _attribute_map = { 'max_trials': {'key': 'maxTrials', 'type': 'int'}, 'timeout': {'key': 'timeout', 'type': 'str'}, 'trial_timeout': {'key': 'trialTimeout', 'type': 'str'}, 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, 'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'}, 'exit_score': {'key': 'exitScore', 'type': 'float'}, 'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'}, 'max_nodes': {'key': 'maxNodes', 'type': 'int'}, } def __init__( self, *, max_trials: Optional[int] = None, timeout: Optional[str] = None, trial_timeout: Optional[str] = None, max_concurrent_trials: Optional[int] = None, max_cores_per_trial: Optional[int] = None, exit_score: Optional[float] = None, enable_early_termination: Optional[bool] = None, max_nodes: Optional[int] = None, **kwargs ): """ :keyword max_trials: :paramtype max_trials: int :keyword timeout: :paramtype timeout: str :keyword trial_timeout: :paramtype trial_timeout: str :keyword max_concurrent_trials: :paramtype max_concurrent_trials: int :keyword max_cores_per_trial: :paramtype max_cores_per_trial: int :keyword exit_score: :paramtype exit_score: float :keyword enable_early_termination: :paramtype enable_early_termination: bool :keyword max_nodes: :paramtype max_nodes: int """ super(LimitSettings, self).__init__(**kwargs) self.max_trials = max_trials self.timeout = timeout self.trial_timeout = trial_timeout self.max_concurrent_trials = max_concurrent_trials self.max_cores_per_trial = max_cores_per_trial self.exit_score = exit_score 
self.enable_early_termination = enable_early_termination self.max_nodes = max_nodes class LinkedADBWorkspaceMetadata(msrest.serialization.Model): """LinkedADBWorkspaceMetadata. :ivar workspace_id: :vartype workspace_id: str :ivar region: :vartype region: str """ _attribute_map = { 'workspace_id': {'key': 'workspaceId', 'type': 'str'}, 'region': {'key': 'region', 'type': 'str'}, } def __init__( self, *, workspace_id: Optional[str] = None, region: Optional[str] = None, **kwargs ): """ :keyword workspace_id: :paramtype workspace_id: str :keyword region: :paramtype region: str """ super(LinkedADBWorkspaceMetadata, self).__init__(**kwargs) self.workspace_id = workspace_id self.region = region class LinkedPipelineInfo(msrest.serialization.Model): """LinkedPipelineInfo. :ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown". :vartype pipeline_type: str or ~flow.models.PipelineType :ivar module_node_id: :vartype module_node_id: str :ivar port_name: :vartype port_name: str :ivar linked_pipeline_draft_id: :vartype linked_pipeline_draft_id: str :ivar linked_pipeline_run_id: :vartype linked_pipeline_run_id: str :ivar is_direct_link: :vartype is_direct_link: bool """ _attribute_map = { 'pipeline_type': {'key': 'pipelineType', 'type': 'str'}, 'module_node_id': {'key': 'moduleNodeId', 'type': 'str'}, 'port_name': {'key': 'portName', 'type': 'str'}, 'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'}, 'linked_pipeline_run_id': {'key': 'linkedPipelineRunId', 'type': 'str'}, 'is_direct_link': {'key': 'isDirectLink', 'type': 'bool'}, } def __init__( self, *, pipeline_type: Optional[Union[str, "PipelineType"]] = None, module_node_id: Optional[str] = None, port_name: Optional[str] = None, linked_pipeline_draft_id: Optional[str] = None, linked_pipeline_run_id: Optional[str] = None, is_direct_link: Optional[bool] = None, **kwargs ): """ :keyword pipeline_type: Possible values include: 
"TrainingPipeline", "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown". :paramtype pipeline_type: str or ~flow.models.PipelineType :keyword module_node_id: :paramtype module_node_id: str :keyword port_name: :paramtype port_name: str :keyword linked_pipeline_draft_id: :paramtype linked_pipeline_draft_id: str :keyword linked_pipeline_run_id: :paramtype linked_pipeline_run_id: str :keyword is_direct_link: :paramtype is_direct_link: bool """ super(LinkedPipelineInfo, self).__init__(**kwargs) self.pipeline_type = pipeline_type self.module_node_id = module_node_id self.port_name = port_name self.linked_pipeline_draft_id = linked_pipeline_draft_id self.linked_pipeline_run_id = linked_pipeline_run_id self.is_direct_link = is_direct_link class LoadFlowAsComponentRequest(msrest.serialization.Model): """LoadFlowAsComponentRequest. :ivar component_name: :vartype component_name: str :ivar component_version: :vartype component_version: str :ivar display_name: :vartype display_name: str :ivar description: :vartype description: str :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar properties: This is a dictionary. :vartype properties: dict[str, str] :ivar is_deterministic: :vartype is_deterministic: bool :ivar flow_definition_file_path: :vartype flow_definition_file_path: str :ivar flow_definition_resource_id: :vartype flow_definition_resource_id: str :ivar flow_definition_data_store_name: :vartype flow_definition_data_store_name: str :ivar flow_definition_blob_path: :vartype flow_definition_blob_path: str :ivar flow_definition_data_uri: :vartype flow_definition_data_uri: str :ivar node_variant: :vartype node_variant: str :ivar inputs_mapping: This is a dictionary. :vartype inputs_mapping: dict[str, str] :ivar connections: This is a dictionary. :vartype connections: dict[str, dict[str, str]] :ivar environment_variables: This is a dictionary. 
:vartype environment_variables: dict[str, str] :ivar runtime_name: :vartype runtime_name: str :ivar session_id: :vartype session_id: str :ivar vm_size: :vartype vm_size: str :ivar max_idle_time_seconds: :vartype max_idle_time_seconds: long """ _attribute_map = { 'component_name': {'key': 'componentName', 'type': 'str'}, 'component_version': {'key': 'componentVersion', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'}, 'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'}, 'flow_definition_resource_id': {'key': 'flowDefinitionResourceId', 'type': 'str'}, 'flow_definition_data_store_name': {'key': 'flowDefinitionDataStoreName', 'type': 'str'}, 'flow_definition_blob_path': {'key': 'flowDefinitionBlobPath', 'type': 'str'}, 'flow_definition_data_uri': {'key': 'flowDefinitionDataUri', 'type': 'str'}, 'node_variant': {'key': 'nodeVariant', 'type': 'str'}, 'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'}, 'connections': {'key': 'connections', 'type': '{{str}}'}, 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, 'runtime_name': {'key': 'runtimeName', 'type': 'str'}, 'session_id': {'key': 'sessionId', 'type': 'str'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'}, } def __init__( self, *, component_name: Optional[str] = None, component_version: Optional[str] = None, display_name: Optional[str] = None, description: Optional[str] = None, tags: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, is_deterministic: Optional[bool] = None, flow_definition_file_path: Optional[str] = None, flow_definition_resource_id: Optional[str] = None, flow_definition_data_store_name: Optional[str] = None, 
flow_definition_blob_path: Optional[str] = None, flow_definition_data_uri: Optional[str] = None, node_variant: Optional[str] = None, inputs_mapping: Optional[Dict[str, str]] = None, connections: Optional[Dict[str, Dict[str, str]]] = None, environment_variables: Optional[Dict[str, str]] = None, runtime_name: Optional[str] = None, session_id: Optional[str] = None, vm_size: Optional[str] = None, max_idle_time_seconds: Optional[int] = None, **kwargs ): """ :keyword component_name: :paramtype component_name: str :keyword component_version: :paramtype component_version: str :keyword display_name: :paramtype display_name: str :keyword description: :paramtype description: str :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] :keyword is_deterministic: :paramtype is_deterministic: bool :keyword flow_definition_file_path: :paramtype flow_definition_file_path: str :keyword flow_definition_resource_id: :paramtype flow_definition_resource_id: str :keyword flow_definition_data_store_name: :paramtype flow_definition_data_store_name: str :keyword flow_definition_blob_path: :paramtype flow_definition_blob_path: str :keyword flow_definition_data_uri: :paramtype flow_definition_data_uri: str :keyword node_variant: :paramtype node_variant: str :keyword inputs_mapping: This is a dictionary. :paramtype inputs_mapping: dict[str, str] :keyword connections: This is a dictionary. :paramtype connections: dict[str, dict[str, str]] :keyword environment_variables: This is a dictionary. 
:paramtype environment_variables: dict[str, str] :keyword runtime_name: :paramtype runtime_name: str :keyword session_id: :paramtype session_id: str :keyword vm_size: :paramtype vm_size: str :keyword max_idle_time_seconds: :paramtype max_idle_time_seconds: long """ super(LoadFlowAsComponentRequest, self).__init__(**kwargs) self.component_name = component_name self.component_version = component_version self.display_name = display_name self.description = description self.tags = tags self.properties = properties self.is_deterministic = is_deterministic self.flow_definition_file_path = flow_definition_file_path self.flow_definition_resource_id = flow_definition_resource_id self.flow_definition_data_store_name = flow_definition_data_store_name self.flow_definition_blob_path = flow_definition_blob_path self.flow_definition_data_uri = flow_definition_data_uri self.node_variant = node_variant self.inputs_mapping = inputs_mapping self.connections = connections self.environment_variables = environment_variables self.runtime_name = runtime_name self.session_id = session_id self.vm_size = vm_size self.max_idle_time_seconds = max_idle_time_seconds class LogRunTerminatedEventDto(msrest.serialization.Model): """LogRunTerminatedEventDto. :ivar next_action_interval_in_seconds: :vartype next_action_interval_in_seconds: int :ivar action_type: Possible values include: "SendValidationRequest", "GetValidationStatus", "SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent". 
:vartype action_type: str or ~flow.models.ActionType :ivar last_checked_time: :vartype last_checked_time: ~datetime.datetime """ _attribute_map = { 'next_action_interval_in_seconds': {'key': 'nextActionIntervalInSeconds', 'type': 'int'}, 'action_type': {'key': 'actionType', 'type': 'str'}, 'last_checked_time': {'key': 'lastCheckedTime', 'type': 'iso-8601'}, } def __init__( self, *, next_action_interval_in_seconds: Optional[int] = None, action_type: Optional[Union[str, "ActionType"]] = None, last_checked_time: Optional[datetime.datetime] = None, **kwargs ): """ :keyword next_action_interval_in_seconds: :paramtype next_action_interval_in_seconds: int :keyword action_type: Possible values include: "SendValidationRequest", "GetValidationStatus", "SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent". :paramtype action_type: str or ~flow.models.ActionType :keyword last_checked_time: :paramtype last_checked_time: ~datetime.datetime """ super(LogRunTerminatedEventDto, self).__init__(**kwargs) self.next_action_interval_in_seconds = next_action_interval_in_seconds self.action_type = action_type self.last_checked_time = last_checked_time class LongRunningOperationUriResponse(msrest.serialization.Model): """LongRunningOperationUriResponse. :ivar location: :vartype location: str :ivar operation_result: :vartype operation_result: str """ _attribute_map = { 'location': {'key': 'location', 'type': 'str'}, 'operation_result': {'key': 'operationResult', 'type': 'str'}, } def __init__( self, *, location: Optional[str] = None, operation_result: Optional[str] = None, **kwargs ): """ :keyword location: :paramtype location: str :keyword operation_result: :paramtype operation_result: str """ super(LongRunningOperationUriResponse, self).__init__(**kwargs) self.location = location self.operation_result = operation_result class LongRunningUpdateRegistryComponentRequest(msrest.serialization.Model): """LongRunningUpdateRegistryComponentRequest. 
:ivar display_name: :vartype display_name: str :ivar description: :vartype description: str :ivar tags: A set of tags. Dictionary of :code:`<string>`. :vartype tags: dict[str, str] :ivar registry_name: :vartype registry_name: str :ivar component_name: :vartype component_name: str :ivar component_version: :vartype component_version: str :ivar update_type: Possible values include: "EnableModule", "DisableModule", "UpdateDisplayName", "UpdateDescription", "UpdateTags". :vartype update_type: str or ~flow.models.LongRunningUpdateType """ _attribute_map = { 'display_name': {'key': 'displayName', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'registry_name': {'key': 'registryName', 'type': 'str'}, 'component_name': {'key': 'componentName', 'type': 'str'}, 'component_version': {'key': 'componentVersion', 'type': 'str'}, 'update_type': {'key': 'updateType', 'type': 'str'}, } def __init__( self, *, display_name: Optional[str] = None, description: Optional[str] = None, tags: Optional[Dict[str, str]] = None, registry_name: Optional[str] = None, component_name: Optional[str] = None, component_version: Optional[str] = None, update_type: Optional[Union[str, "LongRunningUpdateType"]] = None, **kwargs ): """ :keyword display_name: :paramtype display_name: str :keyword description: :paramtype description: str :keyword tags: A set of tags. Dictionary of :code:`<string>`. :paramtype tags: dict[str, str] :keyword registry_name: :paramtype registry_name: str :keyword component_name: :paramtype component_name: str :keyword component_version: :paramtype component_version: str :keyword update_type: Possible values include: "EnableModule", "DisableModule", "UpdateDisplayName", "UpdateDescription", "UpdateTags". 
:paramtype update_type: str or ~flow.models.LongRunningUpdateType """ super(LongRunningUpdateRegistryComponentRequest, self).__init__(**kwargs) self.display_name = display_name self.description = description self.tags = tags self.registry_name = registry_name self.component_name = component_name self.component_version = component_version self.update_type = update_type class ManagedServiceIdentity(msrest.serialization.Model): """ManagedServiceIdentity. All required parameters must be populated in order to send to Azure. :ivar type: Required. Possible values include: "SystemAssigned", "UserAssigned", "SystemAssignedUserAssigned", "None". :vartype type: str or ~flow.models.ManagedServiceIdentityType :ivar principal_id: :vartype principal_id: str :ivar tenant_id: :vartype tenant_id: str :ivar user_assigned_identities: Dictionary of :code:`<UserAssignedIdentity>`. :vartype user_assigned_identities: dict[str, ~flow.models.UserAssignedIdentity] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'principal_id': {'key': 'principalId', 'type': 'str'}, 'tenant_id': {'key': 'tenantId', 'type': 'str'}, 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, } def __init__( self, *, type: Union[str, "ManagedServiceIdentityType"], principal_id: Optional[str] = None, tenant_id: Optional[str] = None, user_assigned_identities: Optional[Dict[str, "UserAssignedIdentity"]] = None, **kwargs ): """ :keyword type: Required. Possible values include: "SystemAssigned", "UserAssigned", "SystemAssignedUserAssigned", "None". :paramtype type: str or ~flow.models.ManagedServiceIdentityType :keyword principal_id: :paramtype principal_id: str :keyword tenant_id: :paramtype tenant_id: str :keyword user_assigned_identities: Dictionary of :code:`<UserAssignedIdentity>`. 
        :paramtype user_assigned_identities: dict[str, ~flow.models.UserAssignedIdentity]
        """
        super(ManagedServiceIdentity, self).__init__(**kwargs)
        self.type = type
        self.principal_id = principal_id
        self.tenant_id = tenant_id
        self.user_assigned_identities = user_assigned_identities


class MavenLibraryDto(msrest.serialization.Model):
    """MavenLibraryDto.

    :ivar coordinates:
    :vartype coordinates: str
    :ivar repo:
    :vartype repo: str
    :ivar exclusions:
    :vartype exclusions: list[str]
    """

    # Maps each Python attribute to its REST wire key and msrest serializer type string.
    _attribute_map = {
        'coordinates': {'key': 'coordinates', 'type': 'str'},
        'repo': {'key': 'repo', 'type': 'str'},
        'exclusions': {'key': 'exclusions', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        coordinates: Optional[str] = None,
        repo: Optional[str] = None,
        exclusions: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword coordinates:
        :paramtype coordinates: str
        :keyword repo:
        :paramtype repo: str
        :keyword exclusions:
        :paramtype exclusions: list[str]
        """
        super(MavenLibraryDto, self).__init__(**kwargs)
        self.coordinates = coordinates
        self.repo = repo
        self.exclusions = exclusions


class MetricProperties(msrest.serialization.Model):
    """MetricProperties.

    :ivar ux_metric_type:
    :vartype ux_metric_type: str
    """

    # Maps each Python attribute to its REST wire key and msrest serializer type string.
    _attribute_map = {
        'ux_metric_type': {'key': 'uxMetricType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        ux_metric_type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword ux_metric_type:
        :paramtype ux_metric_type: str
        """
        super(MetricProperties, self).__init__(**kwargs)
        self.ux_metric_type = ux_metric_type


class MetricSchemaDto(msrest.serialization.Model):
    """MetricSchemaDto.
:ivar num_properties: :vartype num_properties: int :ivar properties: :vartype properties: list[~flow.models.MetricSchemaPropertyDto] """ _attribute_map = { 'num_properties': {'key': 'numProperties', 'type': 'int'}, 'properties': {'key': 'properties', 'type': '[MetricSchemaPropertyDto]'}, } def __init__( self, *, num_properties: Optional[int] = None, properties: Optional[List["MetricSchemaPropertyDto"]] = None, **kwargs ): """ :keyword num_properties: :paramtype num_properties: int :keyword properties: :paramtype properties: list[~flow.models.MetricSchemaPropertyDto] """ super(MetricSchemaDto, self).__init__(**kwargs) self.num_properties = num_properties self.properties = properties class MetricSchemaPropertyDto(msrest.serialization.Model): """MetricSchemaPropertyDto. :ivar property_id: :vartype property_id: str :ivar name: :vartype name: str :ivar type: :vartype type: str """ _attribute_map = { 'property_id': {'key': 'propertyId', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__( self, *, property_id: Optional[str] = None, name: Optional[str] = None, type: Optional[str] = None, **kwargs ): """ :keyword property_id: :paramtype property_id: str :keyword name: :paramtype name: str :keyword type: :paramtype type: str """ super(MetricSchemaPropertyDto, self).__init__(**kwargs) self.property_id = property_id self.name = name self.type = type class MetricV2Dto(msrest.serialization.Model): """MetricV2Dto. :ivar data_container_id: :vartype data_container_id: str :ivar name: :vartype name: str :ivar columns: This is a dictionary. :vartype columns: dict[str, str or ~flow.models.MetricValueType] :ivar properties: :vartype properties: ~flow.models.MetricProperties :ivar namespace: :vartype namespace: str :ivar standard_schema_id: :vartype standard_schema_id: str :ivar value: :vartype value: list[~flow.models.MetricV2Value] :ivar continuation_token: The token used in retrieving the next page. 
If null, there are no additional pages. :vartype continuation_token: str :ivar next_link: The link to the next page constructed using the continuationToken. If null, there are no additional pages. :vartype next_link: str """ _attribute_map = { 'data_container_id': {'key': 'dataContainerId', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'columns': {'key': 'columns', 'type': '{str}'}, 'properties': {'key': 'properties', 'type': 'MetricProperties'}, 'namespace': {'key': 'namespace', 'type': 'str'}, 'standard_schema_id': {'key': 'standardSchemaId', 'type': 'str'}, 'value': {'key': 'value', 'type': '[MetricV2Value]'}, 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, *, data_container_id: Optional[str] = None, name: Optional[str] = None, columns: Optional[Dict[str, Union[str, "MetricValueType"]]] = None, properties: Optional["MetricProperties"] = None, namespace: Optional[str] = None, standard_schema_id: Optional[str] = None, value: Optional[List["MetricV2Value"]] = None, continuation_token: Optional[str] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword data_container_id: :paramtype data_container_id: str :keyword name: :paramtype name: str :keyword columns: This is a dictionary. :paramtype columns: dict[str, str or ~flow.models.MetricValueType] :keyword properties: :paramtype properties: ~flow.models.MetricProperties :keyword namespace: :paramtype namespace: str :keyword standard_schema_id: :paramtype standard_schema_id: str :keyword value: :paramtype value: list[~flow.models.MetricV2Value] :keyword continuation_token: The token used in retrieving the next page. If null, there are no additional pages. :paramtype continuation_token: str :keyword next_link: The link to the next page constructed using the continuationToken. If null, there are no additional pages. 
:paramtype next_link: str """ super(MetricV2Dto, self).__init__(**kwargs) self.data_container_id = data_container_id self.name = name self.columns = columns self.properties = properties self.namespace = namespace self.standard_schema_id = standard_schema_id self.value = value self.continuation_token = continuation_token self.next_link = next_link class MetricV2Value(msrest.serialization.Model): """MetricV2Value. :ivar metric_id: :vartype metric_id: str :ivar created_utc: :vartype created_utc: ~datetime.datetime :ivar step: :vartype step: long :ivar data: Dictionary of :code:`<any>`. :vartype data: dict[str, any] :ivar sas_uri: :vartype sas_uri: str """ _attribute_map = { 'metric_id': {'key': 'metricId', 'type': 'str'}, 'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'}, 'step': {'key': 'step', 'type': 'long'}, 'data': {'key': 'data', 'type': '{object}'}, 'sas_uri': {'key': 'sasUri', 'type': 'str'}, } def __init__( self, *, metric_id: Optional[str] = None, created_utc: Optional[datetime.datetime] = None, step: Optional[int] = None, data: Optional[Dict[str, Any]] = None, sas_uri: Optional[str] = None, **kwargs ): """ :keyword metric_id: :paramtype metric_id: str :keyword created_utc: :paramtype created_utc: ~datetime.datetime :keyword step: :paramtype step: long :keyword data: Dictionary of :code:`<any>`. :paramtype data: dict[str, any] :keyword sas_uri: :paramtype sas_uri: str """ super(MetricV2Value, self).__init__(**kwargs) self.metric_id = metric_id self.created_utc = created_utc self.step = step self.data = data self.sas_uri = sas_uri class MfeInternalAutologgerSettings(msrest.serialization.Model): """MfeInternalAutologgerSettings. :ivar mlflow_autologger: Possible values include: "Enabled", "Disabled". 
    :vartype mlflow_autologger: str or ~flow.models.MfeInternalMLFlowAutologgerState
    """

    _attribute_map = {
        'mlflow_autologger': {'key': 'mlflowAutologger', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        mlflow_autologger: Optional[Union[str, "MfeInternalMLFlowAutologgerState"]] = None,
        **kwargs
    ):
        """
        :keyword mlflow_autologger: Possible values include: "Enabled", "Disabled".
        :paramtype mlflow_autologger: str or ~flow.models.MfeInternalMLFlowAutologgerState
        """
        super(MfeInternalAutologgerSettings, self).__init__(**kwargs)
        self.mlflow_autologger = mlflow_autologger


class MfeInternalIdentityConfiguration(msrest.serialization.Model):
    """MfeInternalIdentityConfiguration.

    :ivar identity_type: Possible values include: "Managed", "AMLToken", "UserIdentity".
    :vartype identity_type: str or ~flow.models.MfeInternalIdentityType
    """

    # Maps each Python attribute to its REST wire key and msrest serializer type string.
    _attribute_map = {
        'identity_type': {'key': 'identityType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        identity_type: Optional[Union[str, "MfeInternalIdentityType"]] = None,
        **kwargs
    ):
        """
        :keyword identity_type: Possible values include: "Managed", "AMLToken", "UserIdentity".
        :paramtype identity_type: str or ~flow.models.MfeInternalIdentityType
        """
        super(MfeInternalIdentityConfiguration, self).__init__(**kwargs)
        self.identity_type = identity_type


class MfeInternalNodes(msrest.serialization.Model):
    """MfeInternalNodes.

    :ivar nodes_value_type: The only acceptable values to pass in are None and "All". The default
     value is None.
    :vartype nodes_value_type: str
    """

    # Maps each Python attribute to its REST wire key and msrest serializer type string.
    _attribute_map = {
        'nodes_value_type': {'key': 'nodesValueType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        nodes_value_type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword nodes_value_type: The only acceptable values to pass in are None and "All". The
         default value is None.
        :paramtype nodes_value_type: str
        """
        super(MfeInternalNodes, self).__init__(**kwargs)
        self.nodes_value_type = nodes_value_type


class MfeInternalOutputData(msrest.serialization.Model):
    """MfeInternalOutputData.
:ivar dataset_name: :vartype dataset_name: str :ivar datastore: :vartype datastore: str :ivar datapath: :vartype datapath: str :ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount", "ReadWriteMount", "Direct", "EvalMount", "EvalDownload". :vartype mode: str or ~flow.models.DataBindingMode """ _attribute_map = { 'dataset_name': {'key': 'datasetName', 'type': 'str'}, 'datastore': {'key': 'datastore', 'type': 'str'}, 'datapath': {'key': 'datapath', 'type': 'str'}, 'mode': {'key': 'mode', 'type': 'str'}, } def __init__( self, *, dataset_name: Optional[str] = None, datastore: Optional[str] = None, datapath: Optional[str] = None, mode: Optional[Union[str, "DataBindingMode"]] = None, **kwargs ): """ :keyword dataset_name: :paramtype dataset_name: str :keyword datastore: :paramtype datastore: str :keyword datapath: :paramtype datapath: str :keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount", "ReadWriteMount", "Direct", "EvalMount", "EvalDownload". :paramtype mode: str or ~flow.models.DataBindingMode """ super(MfeInternalOutputData, self).__init__(**kwargs) self.dataset_name = dataset_name self.datastore = datastore self.datapath = datapath self.mode = mode class MfeInternalSecretConfiguration(msrest.serialization.Model): """MfeInternalSecretConfiguration. :ivar workspace_secret_name: :vartype workspace_secret_name: str :ivar uri: :vartype uri: str """ _attribute_map = { 'workspace_secret_name': {'key': 'workspaceSecretName', 'type': 'str'}, 'uri': {'key': 'uri', 'type': 'str'}, } def __init__( self, *, workspace_secret_name: Optional[str] = None, uri: Optional[str] = None, **kwargs ): """ :keyword workspace_secret_name: :paramtype workspace_secret_name: str :keyword uri: :paramtype uri: str """ super(MfeInternalSecretConfiguration, self).__init__(**kwargs) self.workspace_secret_name = workspace_secret_name self.uri = uri class MfeInternalUriReference(msrest.serialization.Model): """MfeInternalUriReference. 
:ivar file: :vartype file: str :ivar folder: :vartype folder: str """ _attribute_map = { 'file': {'key': 'file', 'type': 'str'}, 'folder': {'key': 'folder', 'type': 'str'}, } def __init__( self, *, file: Optional[str] = None, folder: Optional[str] = None, **kwargs ): """ :keyword file: :paramtype file: str :keyword folder: :paramtype folder: str """ super(MfeInternalUriReference, self).__init__(**kwargs) self.file = file self.folder = folder class MfeInternalV20211001ComponentJob(msrest.serialization.Model): """MfeInternalV20211001ComponentJob. :ivar compute_id: :vartype compute_id: str :ivar component_id: :vartype component_id: str :ivar inputs: This is a dictionary. :vartype inputs: dict[str, ~flow.models.JobInput] :ivar outputs: This is a dictionary. :vartype outputs: dict[str, ~flow.models.JobOutput] :ivar overrides: Anything. :vartype overrides: any """ _attribute_map = { 'compute_id': {'key': 'computeId', 'type': 'str'}, 'component_id': {'key': 'componentId', 'type': 'str'}, 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, 'overrides': {'key': 'overrides', 'type': 'object'}, } def __init__( self, *, compute_id: Optional[str] = None, component_id: Optional[str] = None, inputs: Optional[Dict[str, "JobInput"]] = None, outputs: Optional[Dict[str, "JobOutput"]] = None, overrides: Optional[Any] = None, **kwargs ): """ :keyword compute_id: :paramtype compute_id: str :keyword component_id: :paramtype component_id: str :keyword inputs: This is a dictionary. :paramtype inputs: dict[str, ~flow.models.JobInput] :keyword outputs: This is a dictionary. :paramtype outputs: dict[str, ~flow.models.JobOutput] :keyword overrides: Anything. 
        :paramtype overrides: any
        """
        super(MfeInternalV20211001ComponentJob, self).__init__(**kwargs)
        self.compute_id = compute_id
        self.component_id = component_id
        self.inputs = inputs
        self.outputs = outputs
        self.overrides = overrides


class MinMaxParameterRule(msrest.serialization.Model):
    """MinMaxParameterRule.

    :ivar min:
    :vartype min: float
    :ivar max:
    :vartype max: float
    """

    # Maps each Python attribute to its REST wire key and msrest serializer type string.
    _attribute_map = {
        'min': {'key': 'min', 'type': 'float'},
        'max': {'key': 'max', 'type': 'float'},
    }

    def __init__(
        self,
        *,
        min: Optional[float] = None,
        max: Optional[float] = None,
        **kwargs
    ):
        """
        :keyword min:
        :paramtype min: float
        :keyword max:
        :paramtype max: float
        """
        super(MinMaxParameterRule, self).__init__(**kwargs)
        self.min = min
        self.max = max


class MlcComputeInfo(msrest.serialization.Model):
    """MlcComputeInfo.

    :ivar mlc_compute_type:
    :vartype mlc_compute_type: str
    """

    # Maps each Python attribute to its REST wire key and msrest serializer type string.
    _attribute_map = {
        'mlc_compute_type': {'key': 'mlcComputeType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        mlc_compute_type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword mlc_compute_type:
        :paramtype mlc_compute_type: str
        """
        super(MlcComputeInfo, self).__init__(**kwargs)
        self.mlc_compute_type = mlc_compute_type


class ModelDto(msrest.serialization.Model):
    """ModelDto.
:ivar feed_name: :vartype feed_name: str :ivar name: :vartype name: str :ivar description: :vartype description: str :ivar aml_data_store_name: :vartype aml_data_store_name: str :ivar relative_path: :vartype relative_path: str :ivar id: :vartype id: str :ivar version: :vartype version: str :ivar system_data: :vartype system_data: ~flow.models.SystemData :ivar arm_id: :vartype arm_id: str :ivar online_endpoint_yaml_str: :vartype online_endpoint_yaml_str: str """ _attribute_map = { 'feed_name': {'key': 'feedName', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'}, 'relative_path': {'key': 'relativePath', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'arm_id': {'key': 'armId', 'type': 'str'}, 'online_endpoint_yaml_str': {'key': 'onlineEndpointYamlStr', 'type': 'str'}, } def __init__( self, *, feed_name: Optional[str] = None, name: Optional[str] = None, description: Optional[str] = None, aml_data_store_name: Optional[str] = None, relative_path: Optional[str] = None, id: Optional[str] = None, version: Optional[str] = None, system_data: Optional["SystemData"] = None, arm_id: Optional[str] = None, online_endpoint_yaml_str: Optional[str] = None, **kwargs ): """ :keyword feed_name: :paramtype feed_name: str :keyword name: :paramtype name: str :keyword description: :paramtype description: str :keyword aml_data_store_name: :paramtype aml_data_store_name: str :keyword relative_path: :paramtype relative_path: str :keyword id: :paramtype id: str :keyword version: :paramtype version: str :keyword system_data: :paramtype system_data: ~flow.models.SystemData :keyword arm_id: :paramtype arm_id: str :keyword online_endpoint_yaml_str: :paramtype online_endpoint_yaml_str: str """ super(ModelDto, self).__init__(**kwargs) self.feed_name = feed_name 
self.name = name self.description = description self.aml_data_store_name = aml_data_store_name self.relative_path = relative_path self.id = id self.version = version self.system_data = system_data self.arm_id = arm_id self.online_endpoint_yaml_str = online_endpoint_yaml_str class ModelManagementErrorResponse(msrest.serialization.Model): """ModelManagementErrorResponse. :ivar code: :vartype code: str :ivar status_code: :vartype status_code: int :ivar message: :vartype message: str :ivar target: :vartype target: str :ivar details: :vartype details: list[~flow.models.InnerErrorDetails] :ivar correlation: Dictionary of :code:`<string>`. :vartype correlation: dict[str, str] """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'status_code': {'key': 'statusCode', 'type': 'int'}, 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, 'details': {'key': 'details', 'type': '[InnerErrorDetails]'}, 'correlation': {'key': 'correlation', 'type': '{str}'}, } def __init__( self, *, code: Optional[str] = None, status_code: Optional[int] = None, message: Optional[str] = None, target: Optional[str] = None, details: Optional[List["InnerErrorDetails"]] = None, correlation: Optional[Dict[str, str]] = None, **kwargs ): """ :keyword code: :paramtype code: str :keyword status_code: :paramtype status_code: int :keyword message: :paramtype message: str :keyword target: :paramtype target: str :keyword details: :paramtype details: list[~flow.models.InnerErrorDetails] :keyword correlation: Dictionary of :code:`<string>`. :paramtype correlation: dict[str, str] """ super(ModelManagementErrorResponse, self).__init__(**kwargs) self.code = code self.status_code = status_code self.message = message self.target = target self.details = details self.correlation = correlation class ModifyPipelineJobScheduleDto(msrest.serialization.Model): """ModifyPipelineJobScheduleDto. 
    :ivar pipeline_job_name:
    :vartype pipeline_job_name: str
    :ivar pipeline_job_runtime_settings:
    :vartype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
    :ivar display_name:
    :vartype display_name: str
    :ivar trigger_type: Possible values include: "Recurrence", "Cron".
    :vartype trigger_type: str or ~flow.models.TriggerType
    :ivar recurrence:
    :vartype recurrence: ~flow.models.Recurrence
    :ivar cron:
    :vartype cron: ~flow.models.Cron
    :ivar status: Possible values include: "Enabled", "Disabled".
    :vartype status: str or ~flow.models.ScheduleStatus
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    """

    # msrest (de)serialization map: Python attribute -> REST wire key and
    # msrest type string ('{str}' = dict of str). Enum-valued fields
    # (trigger_type, status) are serialized as plain 'str'.
    # NOTE: AutoRest-generated model — manual edits are lost on regeneration.
    _attribute_map = {
        'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
        'pipeline_job_runtime_settings': {'key': 'pipelineJobRuntimeSettings', 'type': 'PipelineJobRuntimeBasicSettings'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'trigger_type': {'key': 'triggerType', 'type': 'str'},
        'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
        'cron': {'key': 'cron', 'type': 'Cron'},
        'status': {'key': 'status', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    # All fields are optional keyword-only arguments defaulting to None.
    def __init__(
        self,
        *,
        pipeline_job_name: Optional[str] = None,
        pipeline_job_runtime_settings: Optional["PipelineJobRuntimeBasicSettings"] = None,
        display_name: Optional[str] = None,
        trigger_type: Optional[Union[str, "TriggerType"]] = None,
        recurrence: Optional["Recurrence"] = None,
        cron: Optional["Cron"] = None,
        status: Optional[Union[str, "ScheduleStatus"]] = None,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword pipeline_job_name:
        :paramtype pipeline_job_name: str
        :keyword pipeline_job_runtime_settings:
        :paramtype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
        :keyword display_name:
        :paramtype display_name: str
        :keyword trigger_type: Possible values include: "Recurrence", "Cron".
        :paramtype trigger_type: str or ~flow.models.TriggerType
        :keyword recurrence:
        :paramtype recurrence: ~flow.models.Recurrence
        :keyword cron:
        :paramtype cron: ~flow.models.Cron
        :keyword status: Possible values include: "Enabled", "Disabled".
        :paramtype status: str or ~flow.models.ScheduleStatus
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        """
        super(ModifyPipelineJobScheduleDto, self).__init__(**kwargs)
        self.pipeline_job_name = pipeline_job_name
        self.pipeline_job_runtime_settings = pipeline_job_runtime_settings
        self.display_name = display_name
        self.trigger_type = trigger_type
        self.recurrence = recurrence
        self.cron = cron
        self.status = status
        self.description = description
        self.tags = tags
        self.properties = properties


class ModuleDto(msrest.serialization.Model):
    """ModuleDto.

    :ivar namespace:
    :vartype namespace: str
    :ivar tags: A set of tags.
    :vartype tags: list[str]
    :ivar display_name:
    :vartype display_name: str
    :ivar dict_tags: Dictionary of :code:`<string>`.
    :vartype dict_tags: dict[str, str]
    :ivar module_version_id:
    :vartype module_version_id: str
    :ivar feed_name:
    :vartype feed_name: str
    :ivar registry_name:
    :vartype registry_name: str
    :ivar module_name:
    :vartype module_name: str
    :ivar module_version:
    :vartype module_version: str
    :ivar description:
    :vartype description: str
    :ivar owner:
    :vartype owner: str
    :ivar job_type:
    :vartype job_type: str
    :ivar default_version:
    :vartype default_version: str
    :ivar family_id:
    :vartype family_id: str
    :ivar help_document:
    :vartype help_document: str
    :ivar codegen_by:
    :vartype codegen_by: str
    :ivar arm_id:
    :vartype arm_id: str
    :ivar module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
     "Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
    :vartype module_scope: str or ~flow.models.ModuleScope
    :ivar module_entity:
    :vartype module_entity: ~flow.models.ModuleEntity
    :ivar input_types:
    :vartype input_types: list[str]
    :ivar output_types:
    :vartype output_types: list[str]
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    :ivar yaml_link:
    :vartype yaml_link: str
    :ivar yaml_link_with_commit_sha:
    :vartype yaml_link_with_commit_sha: str
    :ivar module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
     "GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
    :vartype module_source_type: str or ~flow.models.ModuleSourceType
    :ivar registered_by:
    :vartype registered_by: str
    :ivar versions:
    :vartype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
    :ivar is_default_module_version:
    :vartype is_default_module_version: bool
    :ivar system_data:
    :vartype system_data: ~flow.models.SystemData
    :ivar system_meta:
    :vartype system_meta: ~flow.models.SystemMeta
    :ivar snapshot_id:
    :vartype snapshot_id: str
    :ivar entry:
    :vartype entry: str
    :ivar os_type:
    :vartype os_type: str
    :ivar require_gpu:
    :vartype require_gpu: bool
    :ivar module_python_interface:
    :vartype module_python_interface: ~flow.models.ModulePythonInterface
    :ivar environment_asset_id:
    :vartype environment_asset_id: str
    :ivar run_setting_parameters:
    :vartype run_setting_parameters: list[~flow.models.RunSettingParameter]
    :ivar supported_ui_input_data_delivery_modes: Dictionary of
     <components·9qwi7e·schemas·moduledto·properties·supporteduiinputdatadeliverymodes·additionalproperties>.
    :vartype supported_ui_input_data_delivery_modes: dict[str, list[str or
     ~flow.models.UIInputDataDeliveryMode]]
    :ivar output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
    :vartype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
    :ivar yaml_str:
    :vartype yaml_str: str
    """

    # msrest (de)serialization map: Python attribute -> REST wire key and
    # msrest type string. 'iso-8601' serializes datetimes as ISO-8601 text;
    # '{[str]}' is a dict whose values are lists of str; enum-valued fields
    # are serialized as plain 'str'.
    # NOTE: AutoRest-generated model — manual edits are lost on regeneration.
    _attribute_map = {
        'namespace': {'key': 'namespace', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '[str]'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'dict_tags': {'key': 'dictTags', 'type': '{str}'},
        'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
        'feed_name': {'key': 'feedName', 'type': 'str'},
        'registry_name': {'key': 'registryName', 'type': 'str'},
        'module_name': {'key': 'moduleName', 'type': 'str'},
        'module_version': {'key': 'moduleVersion', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'owner': {'key': 'owner', 'type': 'str'},
        'job_type': {'key': 'jobType', 'type': 'str'},
        'default_version': {'key': 'defaultVersion', 'type': 'str'},
        'family_id': {'key': 'familyId', 'type': 'str'},
        'help_document': {'key': 'helpDocument', 'type': 'str'},
        'codegen_by': {'key': 'codegenBy', 'type': 'str'},
        'arm_id': {'key': 'armId', 'type': 'str'},
        'module_scope': {'key': 'moduleScope', 'type': 'str'},
        'module_entity': {'key': 'moduleEntity', 'type': 'ModuleEntity'},
        'input_types': {'key': 'inputTypes', 'type': '[str]'},
        'output_types': {'key': 'outputTypes', 'type': '[str]'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
        'yaml_link': {'key': 'yamlLink', 'type': 'str'},
        'yaml_link_with_commit_sha': {'key': 'yamlLinkWithCommitSha', 'type': 'str'},
        'module_source_type': {'key': 'moduleSourceType', 'type': 'str'},
        'registered_by': {'key': 'registeredBy', 'type': 'str'},
        'versions': {'key': 'versions', 'type': '[AzureMLModuleVersionDescriptor]'},
        'is_default_module_version': {'key': 'isDefaultModuleVersion', 'type': 'bool'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'system_meta': {'key': 'systemMeta', 'type': 'SystemMeta'},
        'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
        'entry': {'key': 'entry', 'type': 'str'},
        'os_type': {'key': 'osType', 'type': 'str'},
        'require_gpu': {'key': 'requireGpu', 'type': 'bool'},
        'module_python_interface': {'key': 'modulePythonInterface', 'type': 'ModulePythonInterface'},
        'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
        'run_setting_parameters': {'key': 'runSettingParameters', 'type': '[RunSettingParameter]'},
        'supported_ui_input_data_delivery_modes': {'key': 'supportedUIInputDataDeliveryModes', 'type': '{[str]}'},
        'output_setting_specs': {'key': 'outputSettingSpecs', 'type': '{OutputSettingSpec}'},
        'yaml_str': {'key': 'yamlStr', 'type': 'str'},
    }

    # All fields are optional keyword-only arguments defaulting to None.
    def __init__(
        self,
        *,
        namespace: Optional[str] = None,
        tags: Optional[List[str]] = None,
        display_name: Optional[str] = None,
        dict_tags: Optional[Dict[str, str]] = None,
        module_version_id: Optional[str] = None,
        feed_name: Optional[str] = None,
        registry_name: Optional[str] = None,
        module_name: Optional[str] = None,
        module_version: Optional[str] = None,
        description: Optional[str] = None,
        owner: Optional[str] = None,
        job_type: Optional[str] = None,
        default_version: Optional[str] = None,
        family_id: Optional[str] = None,
        help_document: Optional[str] = None,
        codegen_by: Optional[str] = None,
        arm_id: Optional[str] = None,
        module_scope: Optional[Union[str, "ModuleScope"]] = None,
        module_entity: Optional["ModuleEntity"] = None,
        input_types: Optional[List[str]] = None,
        output_types: Optional[List[str]] = None,
        entity_status: Optional[Union[str, "EntityStatus"]] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        yaml_link: Optional[str] = None,
        yaml_link_with_commit_sha: Optional[str] = None,
        module_source_type: Optional[Union[str, "ModuleSourceType"]] = None,
        registered_by: Optional[str] = None,
        versions: Optional[List["AzureMLModuleVersionDescriptor"]] = None,
        is_default_module_version: Optional[bool] = None,
        system_data: Optional["SystemData"] = None,
        system_meta: Optional["SystemMeta"] = None,
        snapshot_id: Optional[str] = None,
        entry: Optional[str] = None,
        os_type: Optional[str] = None,
        require_gpu: Optional[bool] = None,
        module_python_interface: Optional["ModulePythonInterface"] = None,
        environment_asset_id: Optional[str] = None,
        run_setting_parameters: Optional[List["RunSettingParameter"]] = None,
        supported_ui_input_data_delivery_modes: Optional[Dict[str, List[Union[str, "UIInputDataDeliveryMode"]]]] = None,
        output_setting_specs: Optional[Dict[str, "OutputSettingSpec"]] = None,
        yaml_str: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword namespace:
        :paramtype namespace: str
        :keyword tags: A set of tags.
        :paramtype tags: list[str]
        :keyword display_name:
        :paramtype display_name: str
        :keyword dict_tags: Dictionary of :code:`<string>`.
        :paramtype dict_tags: dict[str, str]
        :keyword module_version_id:
        :paramtype module_version_id: str
        :keyword feed_name:
        :paramtype feed_name: str
        :keyword registry_name:
        :paramtype registry_name: str
        :keyword module_name:
        :paramtype module_name: str
        :keyword module_version:
        :paramtype module_version: str
        :keyword description:
        :paramtype description: str
        :keyword owner:
        :paramtype owner: str
        :keyword job_type:
        :paramtype job_type: str
        :keyword default_version:
        :paramtype default_version: str
        :keyword family_id:
        :paramtype family_id: str
        :keyword help_document:
        :paramtype help_document: str
        :keyword codegen_by:
        :paramtype codegen_by: str
        :keyword arm_id:
        :paramtype arm_id: str
        :keyword module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
         "Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
        :paramtype module_scope: str or ~flow.models.ModuleScope
        :keyword module_entity:
        :paramtype module_entity: ~flow.models.ModuleEntity
        :keyword input_types:
        :paramtype input_types: list[str]
        :keyword output_types:
        :paramtype output_types: list[str]
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        :keyword yaml_link:
        :paramtype yaml_link: str
        :keyword yaml_link_with_commit_sha:
        :paramtype yaml_link_with_commit_sha: str
        :keyword module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
         "GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
        :paramtype module_source_type: str or ~flow.models.ModuleSourceType
        :keyword registered_by:
        :paramtype registered_by: str
        :keyword versions:
        :paramtype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
        :keyword is_default_module_version:
        :paramtype is_default_module_version: bool
        :keyword system_data:
        :paramtype system_data: ~flow.models.SystemData
        :keyword system_meta:
        :paramtype system_meta: ~flow.models.SystemMeta
        :keyword snapshot_id:
        :paramtype snapshot_id: str
        :keyword entry:
        :paramtype entry: str
        :keyword os_type:
        :paramtype os_type: str
        :keyword require_gpu:
        :paramtype require_gpu: bool
        :keyword module_python_interface:
        :paramtype module_python_interface: ~flow.models.ModulePythonInterface
        :keyword environment_asset_id:
        :paramtype environment_asset_id: str
        :keyword run_setting_parameters:
        :paramtype run_setting_parameters: list[~flow.models.RunSettingParameter]
        :keyword supported_ui_input_data_delivery_modes: Dictionary of
         <components·9qwi7e·schemas·moduledto·properties·supporteduiinputdatadeliverymodes·additionalproperties>.
        :paramtype supported_ui_input_data_delivery_modes: dict[str, list[str or
         ~flow.models.UIInputDataDeliveryMode]]
        :keyword output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
        :paramtype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
        :keyword yaml_str:
        :paramtype yaml_str: str
        """
        super(ModuleDto, self).__init__(**kwargs)
        self.namespace = namespace
        self.tags = tags
        self.display_name = display_name
        self.dict_tags = dict_tags
        self.module_version_id = module_version_id
        self.feed_name = feed_name
        self.registry_name = registry_name
        self.module_name = module_name
        self.module_version = module_version
        self.description = description
        self.owner = owner
        self.job_type = job_type
        self.default_version = default_version
        self.family_id = family_id
        self.help_document = help_document
        self.codegen_by = codegen_by
        self.arm_id = arm_id
        self.module_scope = module_scope
        self.module_entity = module_entity
        self.input_types = input_types
        self.output_types = output_types
        self.entity_status = entity_status
        self.created_date = created_date
        self.last_modified_date = last_modified_date
        self.yaml_link = yaml_link
        self.yaml_link_with_commit_sha = yaml_link_with_commit_sha
        self.module_source_type = module_source_type
        self.registered_by = registered_by
        self.versions = versions
        self.is_default_module_version = is_default_module_version
        self.system_data = system_data
        self.system_meta = system_meta
        self.snapshot_id = snapshot_id
        self.entry = entry
        self.os_type = os_type
        self.require_gpu = require_gpu
        self.module_python_interface = module_python_interface
        self.environment_asset_id = environment_asset_id
        self.run_setting_parameters = run_setting_parameters
        self.supported_ui_input_data_delivery_modes = supported_ui_input_data_delivery_modes
        self.output_setting_specs = output_setting_specs
        self.yaml_str = yaml_str


class ModuleDtoWithErrors(msrest.serialization.Model):
    """ModuleDtoWithErrors.

    :ivar version_id_to_module_dto: This is a dictionary.
    :vartype version_id_to_module_dto: dict[str, ~flow.models.ModuleDto]
    :ivar name_and_version_to_module_dto:
    :vartype name_and_version_to_module_dto:
     list[~flow.models.KeyValuePairComponentNameMetaInfoModuleDto]
    :ivar version_id_to_error: This is a dictionary.
    :vartype version_id_to_error: dict[str, ~flow.models.ErrorResponse]
    :ivar name_and_version_to_error:
    :vartype name_and_version_to_error:
     list[~flow.models.KeyValuePairComponentNameMetaInfoErrorResponse]
    """

    # msrest (de)serialization map: Python attribute -> REST wire key and
    # msrest type string ('{T}' = dict of T, '[T]' = list of T).
    # NOTE: AutoRest-generated model — manual edits are lost on regeneration.
    _attribute_map = {
        'version_id_to_module_dto': {'key': 'versionIdToModuleDto', 'type': '{ModuleDto}'},
        'name_and_version_to_module_dto': {'key': 'nameAndVersionToModuleDto', 'type': '[KeyValuePairComponentNameMetaInfoModuleDto]'},
        'version_id_to_error': {'key': 'versionIdToError', 'type': '{ErrorResponse}'},
        'name_and_version_to_error': {'key': 'nameAndVersionToError', 'type': '[KeyValuePairComponentNameMetaInfoErrorResponse]'},
    }

    # All fields are optional keyword-only arguments defaulting to None.
    def __init__(
        self,
        *,
        version_id_to_module_dto: Optional[Dict[str, "ModuleDto"]] = None,
        name_and_version_to_module_dto: Optional[List["KeyValuePairComponentNameMetaInfoModuleDto"]] = None,
        version_id_to_error: Optional[Dict[str, "ErrorResponse"]] = None,
        name_and_version_to_error: Optional[List["KeyValuePairComponentNameMetaInfoErrorResponse"]] = None,
        **kwargs
    ):
        """
        :keyword version_id_to_module_dto: This is a dictionary.
        :paramtype version_id_to_module_dto: dict[str, ~flow.models.ModuleDto]
        :keyword name_and_version_to_module_dto:
        :paramtype name_and_version_to_module_dto:
         list[~flow.models.KeyValuePairComponentNameMetaInfoModuleDto]
        :keyword version_id_to_error: This is a dictionary.
        :paramtype version_id_to_error: dict[str, ~flow.models.ErrorResponse]
        :keyword name_and_version_to_error:
        :paramtype name_and_version_to_error:
         list[~flow.models.KeyValuePairComponentNameMetaInfoErrorResponse]
        """
        super(ModuleDtoWithErrors, self).__init__(**kwargs)
        self.version_id_to_module_dto = version_id_to_module_dto
        self.name_and_version_to_module_dto = name_and_version_to_module_dto
        self.version_id_to_error = version_id_to_error
        self.name_and_version_to_error = name_and_version_to_error


class ModuleDtoWithValidateStatus(msrest.serialization.Model):
    """ModuleDtoWithValidateStatus.

    :ivar existing_module_entity:
    :vartype existing_module_entity: ~flow.models.ModuleEntity
    :ivar status: Possible values include: "NewModule", "NewVersion", "Conflict", "ParseError",
     "ProcessRequestError".
    :vartype status: str or ~flow.models.ModuleInfoFromYamlStatusEnum
    :ivar status_details:
    :vartype status_details: str
    :ivar error_details:
    :vartype error_details: list[str]
    :ivar serialized_module_info:
    :vartype serialized_module_info: str
    :ivar namespace:
    :vartype namespace: str
    :ivar tags: A set of tags.
    :vartype tags: list[str]
    :ivar display_name:
    :vartype display_name: str
    :ivar dict_tags: Dictionary of :code:`<string>`.
    :vartype dict_tags: dict[str, str]
    :ivar module_version_id:
    :vartype module_version_id: str
    :ivar feed_name:
    :vartype feed_name: str
    :ivar registry_name:
    :vartype registry_name: str
    :ivar module_name:
    :vartype module_name: str
    :ivar module_version:
    :vartype module_version: str
    :ivar description:
    :vartype description: str
    :ivar owner:
    :vartype owner: str
    :ivar job_type:
    :vartype job_type: str
    :ivar default_version:
    :vartype default_version: str
    :ivar family_id:
    :vartype family_id: str
    :ivar help_document:
    :vartype help_document: str
    :ivar codegen_by:
    :vartype codegen_by: str
    :ivar arm_id:
    :vartype arm_id: str
    :ivar module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
     "Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
    :vartype module_scope: str or ~flow.models.ModuleScope
    :ivar module_entity:
    :vartype module_entity: ~flow.models.ModuleEntity
    :ivar input_types:
    :vartype input_types: list[str]
    :ivar output_types:
    :vartype output_types: list[str]
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    :ivar yaml_link:
    :vartype yaml_link: str
    :ivar yaml_link_with_commit_sha:
    :vartype yaml_link_with_commit_sha: str
    :ivar module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
     "GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
    :vartype module_source_type: str or ~flow.models.ModuleSourceType
    :ivar registered_by:
    :vartype registered_by: str
    :ivar versions:
    :vartype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
    :ivar is_default_module_version:
    :vartype is_default_module_version: bool
    :ivar system_data:
    :vartype system_data: ~flow.models.SystemData
    :ivar system_meta:
    :vartype system_meta: ~flow.models.SystemMeta
    :ivar snapshot_id:
    :vartype snapshot_id: str
    :ivar entry:
    :vartype entry: str
    :ivar os_type:
    :vartype os_type: str
    :ivar require_gpu:
    :vartype require_gpu: bool
    :ivar module_python_interface:
    :vartype module_python_interface: ~flow.models.ModulePythonInterface
    :ivar environment_asset_id:
    :vartype environment_asset_id: str
    :ivar run_setting_parameters:
    :vartype run_setting_parameters: list[~flow.models.RunSettingParameter]
    :ivar supported_ui_input_data_delivery_modes: Dictionary of
     <components·8o5zaj·schemas·moduledtowithvalidatestatus·properties·supporteduiinputdatadeliverymodes·additionalproperties>.
    :vartype supported_ui_input_data_delivery_modes: dict[str, list[str or
     ~flow.models.UIInputDataDeliveryMode]]
    :ivar output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
    :vartype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
    :ivar yaml_str:
    :vartype yaml_str: str
    """

    # msrest (de)serialization map: Python attribute -> REST wire key and
    # msrest type string. The first five entries carry the validation result;
    # the remainder mirrors the field set of ModuleDto (the generator flattens
    # the schema rather than using inheritance). 'iso-8601' serializes
    # datetimes as ISO-8601 text; enum-valued fields serialize as plain 'str'.
    # NOTE: AutoRest-generated model — manual edits are lost on regeneration.
    _attribute_map = {
        'existing_module_entity': {'key': 'existingModuleEntity', 'type': 'ModuleEntity'},
        'status': {'key': 'status', 'type': 'str'},
        'status_details': {'key': 'statusDetails', 'type': 'str'},
        'error_details': {'key': 'errorDetails', 'type': '[str]'},
        'serialized_module_info': {'key': 'serializedModuleInfo', 'type': 'str'},
        'namespace': {'key': 'namespace', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '[str]'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'dict_tags': {'key': 'dictTags', 'type': '{str}'},
        'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
        'feed_name': {'key': 'feedName', 'type': 'str'},
        'registry_name': {'key': 'registryName', 'type': 'str'},
        'module_name': {'key': 'moduleName', 'type': 'str'},
        'module_version': {'key': 'moduleVersion', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'owner': {'key': 'owner', 'type': 'str'},
        'job_type': {'key': 'jobType', 'type': 'str'},
        'default_version': {'key': 'defaultVersion', 'type': 'str'},
        'family_id': {'key': 'familyId', 'type': 'str'},
        'help_document': {'key': 'helpDocument', 'type': 'str'},
        'codegen_by': {'key': 'codegenBy', 'type': 'str'},
        'arm_id': {'key': 'armId', 'type': 'str'},
        'module_scope': {'key': 'moduleScope', 'type': 'str'},
        'module_entity': {'key': 'moduleEntity', 'type': 'ModuleEntity'},
        'input_types': {'key': 'inputTypes', 'type': '[str]'},
        'output_types': {'key': 'outputTypes', 'type': '[str]'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
        'yaml_link': {'key': 'yamlLink', 'type': 'str'},
        'yaml_link_with_commit_sha': {'key': 'yamlLinkWithCommitSha', 'type': 'str'},
        'module_source_type': {'key': 'moduleSourceType', 'type': 'str'},
        'registered_by': {'key': 'registeredBy', 'type': 'str'},
        'versions': {'key': 'versions', 'type': '[AzureMLModuleVersionDescriptor]'},
        'is_default_module_version': {'key': 'isDefaultModuleVersion', 'type': 'bool'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'system_meta': {'key': 'systemMeta', 'type': 'SystemMeta'},
        'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
        'entry': {'key': 'entry', 'type': 'str'},
        'os_type': {'key': 'osType', 'type': 'str'},
        'require_gpu': {'key': 'requireGpu', 'type': 'bool'},
        'module_python_interface': {'key': 'modulePythonInterface', 'type': 'ModulePythonInterface'},
        'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
        'run_setting_parameters': {'key': 'runSettingParameters', 'type': '[RunSettingParameter]'},
        'supported_ui_input_data_delivery_modes': {'key': 'supportedUIInputDataDeliveryModes', 'type': '{[str]}'},
        'output_setting_specs': {'key': 'outputSettingSpecs', 'type': '{OutputSettingSpec}'},
        'yaml_str': {'key': 'yamlStr', 'type': 'str'},
    }

    # All fields are optional keyword-only arguments defaulting to None.
    def __init__(
        self,
        *,
        existing_module_entity: Optional["ModuleEntity"] = None,
        status: Optional[Union[str, "ModuleInfoFromYamlStatusEnum"]] = None,
        status_details: Optional[str] = None,
        error_details: Optional[List[str]] = None,
        serialized_module_info: Optional[str] = None,
        namespace: Optional[str] = None,
        tags: Optional[List[str]] = None,
        display_name: Optional[str] = None,
        dict_tags: Optional[Dict[str, str]] = None,
        module_version_id: Optional[str] = None,
        feed_name: Optional[str] = None,
        registry_name: Optional[str] = None,
        module_name: Optional[str] = None,
        module_version: Optional[str] = None,
        description: Optional[str] = None,
        owner: Optional[str] = None,
        job_type: Optional[str] = None,
        default_version: Optional[str] = None,
        family_id: Optional[str] = None,
        help_document: Optional[str] = None,
        codegen_by: Optional[str] = None,
        arm_id: Optional[str] = None,
        module_scope: Optional[Union[str, "ModuleScope"]] = None,
        module_entity: Optional["ModuleEntity"] = None,
        input_types: Optional[List[str]] = None,
        output_types: Optional[List[str]] = None,
        entity_status: Optional[Union[str, "EntityStatus"]] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        yaml_link: Optional[str] = None,
        yaml_link_with_commit_sha: Optional[str] = None,
        module_source_type: Optional[Union[str, "ModuleSourceType"]] = None,
        registered_by: Optional[str] = None,
        versions: Optional[List["AzureMLModuleVersionDescriptor"]] = None,
        is_default_module_version: Optional[bool] = None,
        system_data: Optional["SystemData"] = None,
        system_meta: Optional["SystemMeta"] = None,
        snapshot_id: Optional[str] = None,
        entry: Optional[str] = None,
        os_type: Optional[str] = None,
        require_gpu: Optional[bool] = None,
        module_python_interface: Optional["ModulePythonInterface"] = None,
        environment_asset_id: Optional[str] = None,
        run_setting_parameters: Optional[List["RunSettingParameter"]] = None,
        supported_ui_input_data_delivery_modes: Optional[Dict[str, List[Union[str, "UIInputDataDeliveryMode"]]]] = None,
        output_setting_specs: Optional[Dict[str, "OutputSettingSpec"]] = None,
        yaml_str: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword existing_module_entity:
        :paramtype existing_module_entity: ~flow.models.ModuleEntity
        :keyword status: Possible values include: "NewModule", "NewVersion", "Conflict",
         "ParseError", "ProcessRequestError".
        :paramtype status: str or ~flow.models.ModuleInfoFromYamlStatusEnum
        :keyword status_details:
        :paramtype status_details: str
        :keyword error_details:
        :paramtype error_details: list[str]
        :keyword serialized_module_info:
        :paramtype serialized_module_info: str
        :keyword namespace:
        :paramtype namespace: str
        :keyword tags: A set of tags.
        :paramtype tags: list[str]
        :keyword display_name:
        :paramtype display_name: str
        :keyword dict_tags: Dictionary of :code:`<string>`.
        :paramtype dict_tags: dict[str, str]
        :keyword module_version_id:
        :paramtype module_version_id: str
        :keyword feed_name:
        :paramtype feed_name: str
        :keyword registry_name:
        :paramtype registry_name: str
        :keyword module_name:
        :paramtype module_name: str
        :keyword module_version:
        :paramtype module_version: str
        :keyword description:
        :paramtype description: str
        :keyword owner:
        :paramtype owner: str
        :keyword job_type:
        :paramtype job_type: str
        :keyword default_version:
        :paramtype default_version: str
        :keyword family_id:
        :paramtype family_id: str
        :keyword help_document:
        :paramtype help_document: str
        :keyword codegen_by:
        :paramtype codegen_by: str
        :keyword arm_id:
        :paramtype arm_id: str
        :keyword module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
         "Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
        :paramtype module_scope: str or ~flow.models.ModuleScope
        :keyword module_entity:
        :paramtype module_entity: ~flow.models.ModuleEntity
        :keyword input_types:
        :paramtype input_types: list[str]
        :keyword output_types:
        :paramtype output_types: list[str]
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        :keyword yaml_link:
        :paramtype yaml_link: str
        :keyword yaml_link_with_commit_sha:
        :paramtype yaml_link_with_commit_sha: str
        :keyword module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
         "GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
        :paramtype module_source_type: str or ~flow.models.ModuleSourceType
        :keyword registered_by:
        :paramtype registered_by: str
        :keyword versions:
        :paramtype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
        :keyword is_default_module_version:
        :paramtype is_default_module_version: bool
        :keyword system_data:
        :paramtype system_data: ~flow.models.SystemData
        :keyword system_meta:
        :paramtype system_meta: ~flow.models.SystemMeta
        :keyword snapshot_id:
        :paramtype snapshot_id: str
        :keyword entry:
        :paramtype entry: str
        :keyword os_type:
        :paramtype os_type: str
        :keyword require_gpu:
        :paramtype require_gpu: bool
        :keyword module_python_interface:
        :paramtype module_python_interface: ~flow.models.ModulePythonInterface
        :keyword environment_asset_id:
        :paramtype environment_asset_id: str
        :keyword run_setting_parameters:
        :paramtype run_setting_parameters: list[~flow.models.RunSettingParameter]
        :keyword supported_ui_input_data_delivery_modes: Dictionary of
         <components·8o5zaj·schemas·moduledtowithvalidatestatus·properties·supporteduiinputdatadeliverymodes·additionalproperties>.
        :paramtype supported_ui_input_data_delivery_modes: dict[str, list[str or
         ~flow.models.UIInputDataDeliveryMode]]
        :keyword output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
        :paramtype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
        :keyword yaml_str:
        :paramtype yaml_str: str
        """
        super(ModuleDtoWithValidateStatus, self).__init__(**kwargs)
        self.existing_module_entity = existing_module_entity
        self.status = status
        self.status_details = status_details
        self.error_details = error_details
        self.serialized_module_info = serialized_module_info
        self.namespace = namespace
        self.tags = tags
        self.display_name = display_name
        self.dict_tags = dict_tags
        self.module_version_id = module_version_id
        self.feed_name = feed_name
        self.registry_name = registry_name
        self.module_name = module_name
        self.module_version = module_version
        self.description = description
        self.owner = owner
        self.job_type = job_type
        self.default_version = default_version
        self.family_id = family_id
        self.help_document = help_document
        self.codegen_by = codegen_by
        self.arm_id = arm_id
        self.module_scope = module_scope
        self.module_entity = module_entity
        self.input_types = input_types
        self.output_types = output_types
        self.entity_status = entity_status
        self.created_date = created_date
        self.last_modified_date = last_modified_date
        self.yaml_link = yaml_link
        self.yaml_link_with_commit_sha = yaml_link_with_commit_sha
        self.module_source_type = module_source_type
        self.registered_by = registered_by
        self.versions = versions
        self.is_default_module_version = is_default_module_version
        self.system_data = system_data
        self.system_meta = system_meta
        self.snapshot_id = snapshot_id
        self.entry = entry
        self.os_type = os_type
        self.require_gpu = require_gpu
        self.module_python_interface = module_python_interface
        self.environment_asset_id = environment_asset_id
        self.run_setting_parameters = run_setting_parameters
        self.supported_ui_input_data_delivery_modes = supported_ui_input_data_delivery_modes
        self.output_setting_specs = output_setting_specs
        self.yaml_str = yaml_str


class ModuleEntity(msrest.serialization.Model):
    """ModuleEntity.
:ivar display_name: :vartype display_name: str :ivar module_execution_type: :vartype module_execution_type: str :ivar module_type: Possible values include: "None", "BatchInferencing". :vartype module_type: str or ~flow.models.ModuleType :ivar module_type_version: :vartype module_type_version: str :ivar upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed". :vartype upload_state: str or ~flow.models.UploadState :ivar is_deterministic: :vartype is_deterministic: bool :ivar structured_interface: :vartype structured_interface: ~flow.models.StructuredInterface :ivar data_location: :vartype data_location: ~flow.models.DataLocation :ivar identifier_hash: :vartype identifier_hash: str :ivar identifier_hash_v2: :vartype identifier_hash_v2: str :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar properties: This is a dictionary. :vartype properties: dict[str, str] :ivar created_by: :vartype created_by: ~flow.models.CreatedBy :ivar last_updated_by: :vartype last_updated_by: ~flow.models.CreatedBy :ivar runconfig: :vartype runconfig: str :ivar cloud_settings: :vartype cloud_settings: ~flow.models.CloudSettings :ivar category: :vartype category: str :ivar step_type: :vartype step_type: str :ivar stage: :vartype stage: str :ivar name: :vartype name: str :ivar hash: :vartype hash: str :ivar description: :vartype description: str :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled". 
:vartype entity_status: str or ~flow.models.EntityStatus :ivar id: :vartype id: str :ivar etag: :vartype etag: str :ivar created_date: :vartype created_date: ~datetime.datetime :ivar last_modified_date: :vartype last_modified_date: ~datetime.datetime """ _attribute_map = { 'display_name': {'key': 'displayName', 'type': 'str'}, 'module_execution_type': {'key': 'moduleExecutionType', 'type': 'str'}, 'module_type': {'key': 'moduleType', 'type': 'str'}, 'module_type_version': {'key': 'moduleTypeVersion', 'type': 'str'}, 'upload_state': {'key': 'uploadState', 'type': 'str'}, 'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'}, 'structured_interface': {'key': 'structuredInterface', 'type': 'StructuredInterface'}, 'data_location': {'key': 'dataLocation', 'type': 'DataLocation'}, 'identifier_hash': {'key': 'identifierHash', 'type': 'str'}, 'identifier_hash_v2': {'key': 'identifierHashV2', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'created_by': {'key': 'createdBy', 'type': 'CreatedBy'}, 'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'CreatedBy'}, 'runconfig': {'key': 'runconfig', 'type': 'str'}, 'cloud_settings': {'key': 'cloudSettings', 'type': 'CloudSettings'}, 'category': {'key': 'category', 'type': 'str'}, 'step_type': {'key': 'stepType', 'type': 'str'}, 'stage': {'key': 'stage', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'hash': {'key': 'hash', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'entity_status': {'key': 'entityStatus', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'created_date': {'key': 'createdDate', 'type': 'iso-8601'}, 'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'}, } def __init__( self, *, display_name: Optional[str] = None, module_execution_type: Optional[str] = None, module_type: Optional[Union[str, "ModuleType"]] = None, module_type_version: Optional[str] = 
None, upload_state: Optional[Union[str, "UploadState"]] = None, is_deterministic: Optional[bool] = None, structured_interface: Optional["StructuredInterface"] = None, data_location: Optional["DataLocation"] = None, identifier_hash: Optional[str] = None, identifier_hash_v2: Optional[str] = None, tags: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, created_by: Optional["CreatedBy"] = None, last_updated_by: Optional["CreatedBy"] = None, runconfig: Optional[str] = None, cloud_settings: Optional["CloudSettings"] = None, category: Optional[str] = None, step_type: Optional[str] = None, stage: Optional[str] = None, name: Optional[str] = None, hash: Optional[str] = None, description: Optional[str] = None, entity_status: Optional[Union[str, "EntityStatus"]] = None, id: Optional[str] = None, etag: Optional[str] = None, created_date: Optional[datetime.datetime] = None, last_modified_date: Optional[datetime.datetime] = None, **kwargs ): """ :keyword display_name: :paramtype display_name: str :keyword module_execution_type: :paramtype module_execution_type: str :keyword module_type: Possible values include: "None", "BatchInferencing". :paramtype module_type: str or ~flow.models.ModuleType :keyword module_type_version: :paramtype module_type_version: str :keyword upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed". :paramtype upload_state: str or ~flow.models.UploadState :keyword is_deterministic: :paramtype is_deterministic: bool :keyword structured_interface: :paramtype structured_interface: ~flow.models.StructuredInterface :keyword data_location: :paramtype data_location: ~flow.models.DataLocation :keyword identifier_hash: :paramtype identifier_hash: str :keyword identifier_hash_v2: :paramtype identifier_hash_v2: str :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword properties: This is a dictionary. 
:paramtype properties: dict[str, str] :keyword created_by: :paramtype created_by: ~flow.models.CreatedBy :keyword last_updated_by: :paramtype last_updated_by: ~flow.models.CreatedBy :keyword runconfig: :paramtype runconfig: str :keyword cloud_settings: :paramtype cloud_settings: ~flow.models.CloudSettings :keyword category: :paramtype category: str :keyword step_type: :paramtype step_type: str :keyword stage: :paramtype stage: str :keyword name: :paramtype name: str :keyword hash: :paramtype hash: str :keyword description: :paramtype description: str :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled". :paramtype entity_status: str or ~flow.models.EntityStatus :keyword id: :paramtype id: str :keyword etag: :paramtype etag: str :keyword created_date: :paramtype created_date: ~datetime.datetime :keyword last_modified_date: :paramtype last_modified_date: ~datetime.datetime """ super(ModuleEntity, self).__init__(**kwargs) self.display_name = display_name self.module_execution_type = module_execution_type self.module_type = module_type self.module_type_version = module_type_version self.upload_state = upload_state self.is_deterministic = is_deterministic self.structured_interface = structured_interface self.data_location = data_location self.identifier_hash = identifier_hash self.identifier_hash_v2 = identifier_hash_v2 self.tags = tags self.properties = properties self.created_by = created_by self.last_updated_by = last_updated_by self.runconfig = runconfig self.cloud_settings = cloud_settings self.category = category self.step_type = step_type self.stage = stage self.name = name self.hash = hash self.description = description self.entity_status = entity_status self.id = id self.etag = etag self.created_date = created_date self.last_modified_date = last_modified_date class ModulePythonInterface(msrest.serialization.Model): """ModulePythonInterface. 
    :ivar inputs:
    :vartype inputs: list[~flow.models.PythonInterfaceMapping]
    :ivar outputs:
    :vartype outputs: list[~flow.models.PythonInterfaceMapping]
    :ivar parameters:
    :vartype parameters: list[~flow.models.PythonInterfaceMapping]
    """

    # Serialization table consumed by the msrest base Model: maps each Python
    # attribute to its wire-format key and msrest type string.
    _attribute_map = {
        'inputs': {'key': 'inputs', 'type': '[PythonInterfaceMapping]'},
        'outputs': {'key': 'outputs', 'type': '[PythonInterfaceMapping]'},
        'parameters': {'key': 'parameters', 'type': '[PythonInterfaceMapping]'},
    }

    def __init__(
        self,
        *,
        inputs: Optional[List["PythonInterfaceMapping"]] = None,
        outputs: Optional[List["PythonInterfaceMapping"]] = None,
        parameters: Optional[List["PythonInterfaceMapping"]] = None,
        **kwargs
    ):
        """
        :keyword inputs:
        :paramtype inputs: list[~flow.models.PythonInterfaceMapping]
        :keyword outputs:
        :paramtype outputs: list[~flow.models.PythonInterfaceMapping]
        :keyword parameters:
        :paramtype parameters: list[~flow.models.PythonInterfaceMapping]
        """
        super(ModulePythonInterface, self).__init__(**kwargs)
        self.inputs = inputs
        self.outputs = outputs
        self.parameters = parameters


class MpiConfiguration(msrest.serialization.Model):
    """MpiConfiguration.

    :ivar process_count_per_node:
    :vartype process_count_per_node: int
    """

    # Wire-format mapping used by the msrest serializer.
    _attribute_map = {
        'process_count_per_node': {'key': 'processCountPerNode', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        process_count_per_node: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword process_count_per_node:
        :paramtype process_count_per_node: int
        """
        super(MpiConfiguration, self).__init__(**kwargs)
        self.process_count_per_node = process_count_per_node


class NCrossValidations(msrest.serialization.Model):
    """NCrossValidations.

    :ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.NCrossValidationMode :ivar value: :vartype value: int """ _attribute_map = { 'mode': {'key': 'mode', 'type': 'str'}, 'value': {'key': 'value', 'type': 'int'}, } def __init__( self, *, mode: Optional[Union[str, "NCrossValidationMode"]] = None, value: Optional[int] = None, **kwargs ): """ :keyword mode: Possible values include: "Auto", "Custom". :paramtype mode: str or ~flow.models.NCrossValidationMode :keyword value: :paramtype value: int """ super(NCrossValidations, self).__init__(**kwargs) self.mode = mode self.value = value class Node(msrest.serialization.Model): """Node. :ivar name: :vartype name: str :ivar type: Possible values include: "llm", "python", "action", "prompt", "custom_llm", "csharp". :vartype type: str or ~flow.models.ToolType :ivar source: :vartype source: ~flow.models.NodeSource :ivar inputs: Dictionary of :code:`<any>`. :vartype inputs: dict[str, any] :ivar tool: :vartype tool: str :ivar reduce: :vartype reduce: bool :ivar activate: :vartype activate: ~flow.models.Activate :ivar comment: :vartype comment: str :ivar api: :vartype api: str :ivar provider: :vartype provider: str :ivar connection: :vartype connection: str :ivar module: :vartype module: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'source': {'key': 'source', 'type': 'NodeSource'}, 'inputs': {'key': 'inputs', 'type': '{object}'}, 'tool': {'key': 'tool', 'type': 'str'}, 'reduce': {'key': 'reduce', 'type': 'bool'}, 'activate': {'key': 'activate', 'type': 'Activate'}, 'comment': {'key': 'comment', 'type': 'str'}, 'api': {'key': 'api', 'type': 'str'}, 'provider': {'key': 'provider', 'type': 'str'}, 'connection': {'key': 'connection', 'type': 'str'}, 'module': {'key': 'module', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, type: Optional[Union[str, "ToolType"]] = None, source: Optional["NodeSource"] = None, inputs: Optional[Dict[str, Any]] = None, tool: Optional[str] = 
None, reduce: Optional[bool] = None, activate: Optional["Activate"] = None, comment: Optional[str] = None, api: Optional[str] = None, provider: Optional[str] = None, connection: Optional[str] = None, module: Optional[str] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword type: Possible values include: "llm", "python", "action", "prompt", "custom_llm", "csharp". :paramtype type: str or ~flow.models.ToolType :keyword source: :paramtype source: ~flow.models.NodeSource :keyword inputs: Dictionary of :code:`<any>`. :paramtype inputs: dict[str, any] :keyword tool: :paramtype tool: str :keyword reduce: :paramtype reduce: bool :keyword activate: :paramtype activate: ~flow.models.Activate :keyword comment: :paramtype comment: str :keyword api: :paramtype api: str :keyword provider: :paramtype provider: str :keyword connection: :paramtype connection: str :keyword module: :paramtype module: str """ super(Node, self).__init__(**kwargs) self.name = name self.type = type self.source = source self.inputs = inputs self.tool = tool self.reduce = reduce self.activate = activate self.comment = comment self.api = api self.provider = provider self.connection = connection self.module = module class NodeInputPort(msrest.serialization.Model): """NodeInputPort. 
    :ivar name:
    :vartype name: str
    :ivar documentation:
    :vartype documentation: str
    :ivar data_types_ids:
    :vartype data_types_ids: list[str]
    :ivar is_optional:
    :vartype is_optional: bool
    """

    # Wire-format mapping used by the msrest serializer: Python attribute ->
    # {JSON key, msrest type string}.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'documentation': {'key': 'documentation', 'type': 'str'},
        'data_types_ids': {'key': 'dataTypesIds', 'type': '[str]'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        documentation: Optional[str] = None,
        data_types_ids: Optional[List[str]] = None,
        is_optional: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword documentation:
        :paramtype documentation: str
        :keyword data_types_ids:
        :paramtype data_types_ids: list[str]
        :keyword is_optional:
        :paramtype is_optional: bool
        """
        super(NodeInputPort, self).__init__(**kwargs)
        self.name = name
        self.documentation = documentation
        self.data_types_ids = data_types_ids
        self.is_optional = is_optional


class NodeLayout(msrest.serialization.Model):
    """NodeLayout.
:ivar x: :vartype x: float :ivar y: :vartype y: float :ivar width: :vartype width: float :ivar height: :vartype height: float :ivar extended_data: :vartype extended_data: str """ _attribute_map = { 'x': {'key': 'x', 'type': 'float'}, 'y': {'key': 'y', 'type': 'float'}, 'width': {'key': 'width', 'type': 'float'}, 'height': {'key': 'height', 'type': 'float'}, 'extended_data': {'key': 'extendedData', 'type': 'str'}, } def __init__( self, *, x: Optional[float] = None, y: Optional[float] = None, width: Optional[float] = None, height: Optional[float] = None, extended_data: Optional[str] = None, **kwargs ): """ :keyword x: :paramtype x: float :keyword y: :paramtype y: float :keyword width: :paramtype width: float :keyword height: :paramtype height: float :keyword extended_data: :paramtype extended_data: str """ super(NodeLayout, self).__init__(**kwargs) self.x = x self.y = y self.width = width self.height = height self.extended_data = extended_data class NodeOutputPort(msrest.serialization.Model): """NodeOutputPort. :ivar name: :vartype name: str :ivar documentation: :vartype documentation: str :ivar data_type_id: :vartype data_type_id: str :ivar pass_through_input_name: :vartype pass_through_input_name: str :ivar early_available: :vartype early_available: bool :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". 
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'documentation': {'key': 'documentation', 'type': 'str'}, 'data_type_id': {'key': 'dataTypeId', 'type': 'str'}, 'pass_through_input_name': {'key': 'passThroughInputName', 'type': 'str'}, 'early_available': {'key': 'EarlyAvailable', 'type': 'bool'}, 'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, documentation: Optional[str] = None, data_type_id: Optional[str] = None, pass_through_input_name: Optional[str] = None, early_available: Optional[bool] = None, data_store_mode: Optional[Union[str, "AEVADataStoreMode"]] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword documentation: :paramtype documentation: str :keyword data_type_id: :paramtype data_type_id: str :keyword pass_through_input_name: :paramtype pass_through_input_name: str :keyword early_available: :paramtype early_available: bool :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". :paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode """ super(NodeOutputPort, self).__init__(**kwargs) self.name = name self.documentation = documentation self.data_type_id = data_type_id self.pass_through_input_name = pass_through_input_name self.early_available = early_available self.data_store_mode = data_store_mode class NodePortInterface(msrest.serialization.Model): """NodePortInterface. 
    :ivar inputs:
    :vartype inputs: list[~flow.models.NodeInputPort]
    :ivar outputs:
    :vartype outputs: list[~flow.models.NodeOutputPort]
    :ivar control_outputs:
    :vartype control_outputs: list[~flow.models.ControlOutput]
    """

    # Wire-format mapping used by the msrest serializer.
    _attribute_map = {
        'inputs': {'key': 'inputs', 'type': '[NodeInputPort]'},
        'outputs': {'key': 'outputs', 'type': '[NodeOutputPort]'},
        'control_outputs': {'key': 'controlOutputs', 'type': '[ControlOutput]'},
    }

    def __init__(
        self,
        *,
        inputs: Optional[List["NodeInputPort"]] = None,
        outputs: Optional[List["NodeOutputPort"]] = None,
        control_outputs: Optional[List["ControlOutput"]] = None,
        **kwargs
    ):
        """
        :keyword inputs:
        :paramtype inputs: list[~flow.models.NodeInputPort]
        :keyword outputs:
        :paramtype outputs: list[~flow.models.NodeOutputPort]
        :keyword control_outputs:
        :paramtype control_outputs: list[~flow.models.ControlOutput]
        """
        super(NodePortInterface, self).__init__(**kwargs)
        self.inputs = inputs
        self.outputs = outputs
        self.control_outputs = control_outputs


class Nodes(msrest.serialization.Model):
    """Nodes.

    All required parameters must be populated in order to send to Azure.

    :ivar nodes_value_type: Required. Possible values include: "All", "Custom".
    :vartype nodes_value_type: str or ~flow.models.NodesValueType
    :ivar values:
    :vartype values: list[int]
    """

    # 'nodes_value_type' is marked required for msrest validation.
    _validation = {
        'nodes_value_type': {'required': True},
    }

    # Wire-format mapping used by the msrest serializer.
    _attribute_map = {
        'nodes_value_type': {'key': 'nodes_value_type', 'type': 'str'},
        'values': {'key': 'values', 'type': '[int]'},
    }

    def __init__(
        self,
        *,
        nodes_value_type: Union[str, "NodesValueType"],
        values: Optional[List[int]] = None,
        **kwargs
    ):
        """
        :keyword nodes_value_type: Required. Possible values include: "All", "Custom".
        :paramtype nodes_value_type: str or ~flow.models.NodesValueType
        :keyword values:
        :paramtype values: list[int]
        """
        super(Nodes, self).__init__(**kwargs)
        self.nodes_value_type = nodes_value_type
        self.values = values


class NodeSource(msrest.serialization.Model):
    """NodeSource.
:ivar type: :vartype type: str :ivar tool: :vartype tool: str :ivar path: :vartype path: str """ _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'tool': {'key': 'tool', 'type': 'str'}, 'path': {'key': 'path', 'type': 'str'}, } def __init__( self, *, type: Optional[str] = None, tool: Optional[str] = None, path: Optional[str] = None, **kwargs ): """ :keyword type: :paramtype type: str :keyword tool: :paramtype tool: str :keyword path: :paramtype path: str """ super(NodeSource, self).__init__(**kwargs) self.type = type self.tool = tool self.path = path class NodeTelemetryMetaInfo(msrest.serialization.Model): """NodeTelemetryMetaInfo. :ivar pipeline_run_id: :vartype pipeline_run_id: str :ivar node_id: :vartype node_id: str :ivar version_id: :vartype version_id: str :ivar node_type: :vartype node_type: str :ivar node_source: :vartype node_source: str :ivar is_anonymous: :vartype is_anonymous: bool :ivar is_pipeline_component: :vartype is_pipeline_component: bool """ _attribute_map = { 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, 'node_id': {'key': 'nodeId', 'type': 'str'}, 'version_id': {'key': 'versionId', 'type': 'str'}, 'node_type': {'key': 'nodeType', 'type': 'str'}, 'node_source': {'key': 'nodeSource', 'type': 'str'}, 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, 'is_pipeline_component': {'key': 'isPipelineComponent', 'type': 'bool'}, } def __init__( self, *, pipeline_run_id: Optional[str] = None, node_id: Optional[str] = None, version_id: Optional[str] = None, node_type: Optional[str] = None, node_source: Optional[str] = None, is_anonymous: Optional[bool] = None, is_pipeline_component: Optional[bool] = None, **kwargs ): """ :keyword pipeline_run_id: :paramtype pipeline_run_id: str :keyword node_id: :paramtype node_id: str :keyword version_id: :paramtype version_id: str :keyword node_type: :paramtype node_type: str :keyword node_source: :paramtype node_source: str :keyword is_anonymous: :paramtype is_anonymous: bool :keyword 
is_pipeline_component: :paramtype is_pipeline_component: bool """ super(NodeTelemetryMetaInfo, self).__init__(**kwargs) self.pipeline_run_id = pipeline_run_id self.node_id = node_id self.version_id = version_id self.node_type = node_type self.node_source = node_source self.is_anonymous = is_anonymous self.is_pipeline_component = is_pipeline_component class NodeVariant(msrest.serialization.Model): """NodeVariant. :ivar variants: This is a dictionary. :vartype variants: dict[str, ~flow.models.VariantNode] :ivar default_variant_id: :vartype default_variant_id: str """ _attribute_map = { 'variants': {'key': 'variants', 'type': '{VariantNode}'}, 'default_variant_id': {'key': 'defaultVariantId', 'type': 'str'}, } def __init__( self, *, variants: Optional[Dict[str, "VariantNode"]] = None, default_variant_id: Optional[str] = None, **kwargs ): """ :keyword variants: This is a dictionary. :paramtype variants: dict[str, ~flow.models.VariantNode] :keyword default_variant_id: :paramtype default_variant_id: str """ super(NodeVariant, self).__init__(**kwargs) self.variants = variants self.default_variant_id = default_variant_id class NoteBookTaskDto(msrest.serialization.Model): """NoteBookTaskDto. :ivar notebook_path: :vartype notebook_path: str :ivar base_parameters: Dictionary of :code:`<string>`. :vartype base_parameters: dict[str, str] """ _attribute_map = { 'notebook_path': {'key': 'notebook_path', 'type': 'str'}, 'base_parameters': {'key': 'base_parameters', 'type': '{str}'}, } def __init__( self, *, notebook_path: Optional[str] = None, base_parameters: Optional[Dict[str, str]] = None, **kwargs ): """ :keyword notebook_path: :paramtype notebook_path: str :keyword base_parameters: Dictionary of :code:`<string>`. :paramtype base_parameters: dict[str, str] """ super(NoteBookTaskDto, self).__init__(**kwargs) self.notebook_path = notebook_path self.base_parameters = base_parameters class NotificationSetting(msrest.serialization.Model): """NotificationSetting. 
:ivar emails: :vartype emails: list[str] :ivar email_on: :vartype email_on: list[str or ~flow.models.EmailNotificationEnableType] :ivar webhooks: Dictionary of :code:`<Webhook>`. :vartype webhooks: dict[str, ~flow.models.Webhook] """ _attribute_map = { 'emails': {'key': 'emails', 'type': '[str]'}, 'email_on': {'key': 'emailOn', 'type': '[str]'}, 'webhooks': {'key': 'webhooks', 'type': '{Webhook}'}, } def __init__( self, *, emails: Optional[List[str]] = None, email_on: Optional[List[Union[str, "EmailNotificationEnableType"]]] = None, webhooks: Optional[Dict[str, "Webhook"]] = None, **kwargs ): """ :keyword emails: :paramtype emails: list[str] :keyword email_on: :paramtype email_on: list[str or ~flow.models.EmailNotificationEnableType] :keyword webhooks: Dictionary of :code:`<Webhook>`. :paramtype webhooks: dict[str, ~flow.models.Webhook] """ super(NotificationSetting, self).__init__(**kwargs) self.emails = emails self.email_on = email_on self.webhooks = webhooks class ODataError(msrest.serialization.Model): """Represents OData v4 error object. :ivar code: Gets or sets a language-independent, service-defined error code. This code serves as a sub-status for the HTTP error code specified in the response. :vartype code: str :ivar message: Gets or sets a human-readable, language-dependent representation of the error. The ``Content-Language`` header MUST contain the language code from [RFC5646] corresponding to the language in which the value for message is written. :vartype message: str :ivar target: Gets or sets the target of the particular error (for example, the name of the property in error). :vartype target: str :ivar details: Gets or sets additional details about the error. :vartype details: list[~flow.models.ODataErrorDetail] :ivar innererror: The contents of this object are service-defined. 
Usually this object contains information that will help debug the service and SHOULD only be used in development environments in order to guard against potential security concerns around information disclosure. :vartype innererror: ~flow.models.ODataInnerError """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, 'details': {'key': 'details', 'type': '[ODataErrorDetail]'}, 'innererror': {'key': 'innererror', 'type': 'ODataInnerError'}, } def __init__( self, *, code: Optional[str] = None, message: Optional[str] = None, target: Optional[str] = None, details: Optional[List["ODataErrorDetail"]] = None, innererror: Optional["ODataInnerError"] = None, **kwargs ): """ :keyword code: Gets or sets a language-independent, service-defined error code. This code serves as a sub-status for the HTTP error code specified in the response. :paramtype code: str :keyword message: Gets or sets a human-readable, language-dependent representation of the error. The ``Content-Language`` header MUST contain the language code from [RFC5646] corresponding to the language in which the value for message is written. :paramtype message: str :keyword target: Gets or sets the target of the particular error (for example, the name of the property in error). :paramtype target: str :keyword details: Gets or sets additional details about the error. :paramtype details: list[~flow.models.ODataErrorDetail] :keyword innererror: The contents of this object are service-defined. Usually this object contains information that will help debug the service and SHOULD only be used in development environments in order to guard against potential security concerns around information disclosure. 
        :paramtype innererror: ~flow.models.ODataInnerError
        """
        super(ODataError, self).__init__(**kwargs)
        self.code = code
        self.message = message
        self.target = target
        self.details = details
        self.innererror = innererror


class ODataErrorDetail(msrest.serialization.Model):
    """Represents additional error details.

    :ivar code: Gets or sets a language-independent, service-defined error code.
    :vartype code: str
    :ivar message: Gets or sets a human-readable, language-dependent representation of the error.
    :vartype message: str
    :ivar target: Gets or sets the target of the particular error (for example, the name of the
     property in error).
    :vartype target: str
    """

    # Wire-format mapping used by the msrest serializer.
    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        code: Optional[str] = None,
        message: Optional[str] = None,
        target: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword code: Gets or sets a language-independent, service-defined error code.
        :paramtype code: str
        :keyword message: Gets or sets a human-readable, language-dependent representation of the
         error.
        :paramtype message: str
        :keyword target: Gets or sets the target of the particular error (for example, the name of
         the property in error).
        :paramtype target: str
        """
        super(ODataErrorDetail, self).__init__(**kwargs)
        self.code = code
        self.message = message
        self.target = target


class ODataErrorResponse(msrest.serialization.Model):
    """Represents OData v4 compliant error response message.

    :ivar error: Represents OData v4 error object.
    :vartype error: ~flow.models.ODataError
    """

    # Wire-format mapping used by the msrest serializer.
    _attribute_map = {
        'error': {'key': 'error', 'type': 'ODataError'},
    }

    def __init__(
        self,
        *,
        error: Optional["ODataError"] = None,
        **kwargs
    ):
        """
        :keyword error: Represents OData v4 error object.
:paramtype error: ~flow.models.ODataError """ super(ODataErrorResponse, self).__init__(**kwargs) self.error = error class ODataInnerError(msrest.serialization.Model): """The contents of this object are service-defined. Usually this object contains information that will help debug the service and SHOULD only be used in development environments in order to guard against potential security concerns around information disclosure. :ivar client_request_id: Gets or sets the client provided request ID. :vartype client_request_id: str :ivar service_request_id: Gets or sets the server generated request ID. :vartype service_request_id: str :ivar trace: Gets or sets the exception stack trace. DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENT. :vartype trace: str :ivar context: Gets or sets additional context for the exception. DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENT. :vartype context: str """ _attribute_map = { 'client_request_id': {'key': 'clientRequestId', 'type': 'str'}, 'service_request_id': {'key': 'serviceRequestId', 'type': 'str'}, 'trace': {'key': 'trace', 'type': 'str'}, 'context': {'key': 'context', 'type': 'str'}, } def __init__( self, *, client_request_id: Optional[str] = None, service_request_id: Optional[str] = None, trace: Optional[str] = None, context: Optional[str] = None, **kwargs ): """ :keyword client_request_id: Gets or sets the client provided request ID. :paramtype client_request_id: str :keyword service_request_id: Gets or sets the server generated request ID. :paramtype service_request_id: str :keyword trace: Gets or sets the exception stack trace. DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENT. :paramtype trace: str :keyword context: Gets or sets additional context for the exception. DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENT. 
:paramtype context: str """ super(ODataInnerError, self).__init__(**kwargs) self.client_request_id = client_request_id self.service_request_id = service_request_id self.trace = trace self.context = context class OutputData(msrest.serialization.Model): """OutputData. :ivar output_location: :vartype output_location: ~flow.models.ExecutionDataLocation :ivar mechanism: Possible values include: "Upload", "Mount", "Hdfs", "Link", "Direct". :vartype mechanism: str or ~flow.models.OutputMechanism :ivar additional_options: :vartype additional_options: ~flow.models.OutputOptions :ivar environment_variable_name: :vartype environment_variable_name: str """ _attribute_map = { 'output_location': {'key': 'outputLocation', 'type': 'ExecutionDataLocation'}, 'mechanism': {'key': 'mechanism', 'type': 'str'}, 'additional_options': {'key': 'additionalOptions', 'type': 'OutputOptions'}, 'environment_variable_name': {'key': 'environmentVariableName', 'type': 'str'}, } def __init__( self, *, output_location: Optional["ExecutionDataLocation"] = None, mechanism: Optional[Union[str, "OutputMechanism"]] = None, additional_options: Optional["OutputOptions"] = None, environment_variable_name: Optional[str] = None, **kwargs ): """ :keyword output_location: :paramtype output_location: ~flow.models.ExecutionDataLocation :keyword mechanism: Possible values include: "Upload", "Mount", "Hdfs", "Link", "Direct". :paramtype mechanism: str or ~flow.models.OutputMechanism :keyword additional_options: :paramtype additional_options: ~flow.models.OutputOptions :keyword environment_variable_name: :paramtype environment_variable_name: str """ super(OutputData, self).__init__(**kwargs) self.output_location = output_location self.mechanism = mechanism self.additional_options = additional_options self.environment_variable_name = environment_variable_name class OutputDataBinding(msrest.serialization.Model): """OutputDataBinding. 
:ivar datastore_id: :vartype datastore_id: str :ivar path_on_datastore: :vartype path_on_datastore: str :ivar path_on_compute: :vartype path_on_compute: str :ivar description: :vartype description: str :ivar uri: :vartype uri: ~flow.models.MfeInternalUriReference :ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount", "ReadWriteMount", "Direct", "EvalMount", "EvalDownload". :vartype mode: str or ~flow.models.DataBindingMode :ivar asset_uri: :vartype asset_uri: str :ivar is_asset_job_output: :vartype is_asset_job_output: bool :ivar job_output_type: Possible values include: "Uri", "Dataset", "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel". :vartype job_output_type: str or ~flow.models.JobOutputType :ivar asset_name: :vartype asset_name: str :ivar asset_version: :vartype asset_version: str :ivar auto_delete_setting: :vartype auto_delete_setting: ~flow.models.AutoDeleteSetting """ _attribute_map = { 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, 'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'}, 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'uri': {'key': 'uri', 'type': 'MfeInternalUriReference'}, 'mode': {'key': 'mode', 'type': 'str'}, 'asset_uri': {'key': 'assetUri', 'type': 'str'}, 'is_asset_job_output': {'key': 'isAssetJobOutput', 'type': 'bool'}, 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, 'asset_name': {'key': 'assetName', 'type': 'str'}, 'asset_version': {'key': 'assetVersion', 'type': 'str'}, 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, } def __init__( self, *, datastore_id: Optional[str] = None, path_on_datastore: Optional[str] = None, path_on_compute: Optional[str] = None, description: Optional[str] = None, uri: Optional["MfeInternalUriReference"] = None, mode: Optional[Union[str, "DataBindingMode"]] = None, asset_uri: Optional[str] = None, is_asset_job_output: 
Optional[bool] = None, job_output_type: Optional[Union[str, "JobOutputType"]] = None, asset_name: Optional[str] = None, asset_version: Optional[str] = None, auto_delete_setting: Optional["AutoDeleteSetting"] = None, **kwargs ): """ :keyword datastore_id: :paramtype datastore_id: str :keyword path_on_datastore: :paramtype path_on_datastore: str :keyword path_on_compute: :paramtype path_on_compute: str :keyword description: :paramtype description: str :keyword uri: :paramtype uri: ~flow.models.MfeInternalUriReference :keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount", "ReadWriteMount", "Direct", "EvalMount", "EvalDownload". :paramtype mode: str or ~flow.models.DataBindingMode :keyword asset_uri: :paramtype asset_uri: str :keyword is_asset_job_output: :paramtype is_asset_job_output: bool :keyword job_output_type: Possible values include: "Uri", "Dataset", "UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel". :paramtype job_output_type: str or ~flow.models.JobOutputType :keyword asset_name: :paramtype asset_name: str :keyword asset_version: :paramtype asset_version: str :keyword auto_delete_setting: :paramtype auto_delete_setting: ~flow.models.AutoDeleteSetting """ super(OutputDataBinding, self).__init__(**kwargs) self.datastore_id = datastore_id self.path_on_datastore = path_on_datastore self.path_on_compute = path_on_compute self.description = description self.uri = uri self.mode = mode self.asset_uri = asset_uri self.is_asset_job_output = is_asset_job_output self.job_output_type = job_output_type self.asset_name = asset_name self.asset_version = asset_version self.auto_delete_setting = auto_delete_setting class OutputDatasetLineage(msrest.serialization.Model): """OutputDatasetLineage. :ivar identifier: :vartype identifier: ~flow.models.DatasetIdentifier :ivar output_type: Possible values include: "RunOutput", "Reference". 
    :vartype output_type: str or ~flow.models.DatasetOutputType
    :ivar output_details:
    :vartype output_details: ~flow.models.DatasetOutputDetails
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    _attribute_map = {
        'identifier': {'key': 'identifier', 'type': 'DatasetIdentifier'},
        'output_type': {'key': 'outputType', 'type': 'str'},
        'output_details': {'key': 'outputDetails', 'type': 'DatasetOutputDetails'},
    }

    def __init__(
        self,
        *,
        identifier: Optional["DatasetIdentifier"] = None,
        output_type: Optional[Union[str, "DatasetOutputType"]] = None,
        output_details: Optional["DatasetOutputDetails"] = None,
        **kwargs
    ):
        """
        :keyword identifier:
        :paramtype identifier: ~flow.models.DatasetIdentifier
        :keyword output_type: Possible values include: "RunOutput", "Reference".
        :paramtype output_type: str or ~flow.models.DatasetOutputType
        :keyword output_details:
        :paramtype output_details: ~flow.models.DatasetOutputDetails
        """
        super(OutputDatasetLineage, self).__init__(**kwargs)
        self.identifier = identifier
        self.output_type = output_type
        self.output_details = output_details


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class OutputDefinition(msrest.serialization.Model):
    """OutputDefinition.
    :ivar name:
    :vartype name: str
    :ivar type:
    :vartype type: list[str or ~flow.models.ValueType]
    :ivar description:
    :vartype description: str
    :ivar is_property:
    :vartype is_property: bool
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    # Note 'type' is serialized as a plain list of strings ('[str]').
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': '[str]'},
        'description': {'key': 'description', 'type': 'str'},
        'is_property': {'key': 'isProperty', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        type: Optional[List[Union[str, "ValueType"]]] = None,
        description: Optional[str] = None,
        is_property: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword type:
        :paramtype type: list[str or ~flow.models.ValueType]
        :keyword description:
        :paramtype description: str
        :keyword is_property:
        :paramtype is_property: bool
        """
        super(OutputDefinition, self).__init__(**kwargs)
        self.name = name
        self.type = type
        self.description = description
        self.is_property = is_property


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class OutputOptions(msrest.serialization.Model):
    """OutputOptions.

    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar registration_options:
    :vartype registration_options: ~flow.models.RegistrationOptions
    :ivar upload_options:
    :vartype upload_options: ~flow.models.UploadOptions
    :ivar mount_options: Dictionary of :code:`<string>`.
    :vartype mount_options: dict[str, str]
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    _attribute_map = {
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'registration_options': {'key': 'registrationOptions', 'type': 'RegistrationOptions'},
        'upload_options': {'key': 'uploadOptions', 'type': 'UploadOptions'},
        'mount_options': {'key': 'mountOptions', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        path_on_compute: Optional[str] = None,
        registration_options: Optional["RegistrationOptions"] = None,
        upload_options: Optional["UploadOptions"] = None,
        mount_options: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword registration_options:
        :paramtype registration_options: ~flow.models.RegistrationOptions
        :keyword upload_options:
        :paramtype upload_options: ~flow.models.UploadOptions
        :keyword mount_options: Dictionary of :code:`<string>`.
        :paramtype mount_options: dict[str, str]
        """
        super(OutputOptions, self).__init__(**kwargs)
        self.path_on_compute = path_on_compute
        self.registration_options = registration_options
        self.upload_options = upload_options
        self.mount_options = mount_options


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class OutputSetting(msrest.serialization.Model):
    """OutputSetting.

    :ivar name:
    :vartype name: str
    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar data_store_name_parameter_assignment:
    :vartype data_store_name_parameter_assignment: ~flow.models.ParameterAssignment
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
    :ivar data_store_mode_parameter_assignment:
    :vartype data_store_mode_parameter_assignment: ~flow.models.ParameterAssignment
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar path_on_compute_parameter_assignment:
    :vartype path_on_compute_parameter_assignment: ~flow.models.ParameterAssignment
    :ivar overwrite:
    :vartype overwrite: bool
    :ivar data_reference_name:
    :vartype data_reference_name: str
    :ivar web_service_port:
    :vartype web_service_port: str
    :ivar dataset_registration:
    :vartype dataset_registration: ~flow.models.DatasetRegistration
    :ivar dataset_output_options:
    :vartype dataset_output_options: ~flow.models.DatasetOutputOptions
    :ivar asset_output_settings:
    :vartype asset_output_settings: ~flow.models.AssetOutputSettings
    :ivar parameter_name:
    :vartype parameter_name: str
    :ivar asset_output_settings_parameter_name:
    :vartype asset_output_settings_parameter_name: str
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    # NOTE(review): the '...ParameterAssignment' and 'AssetOutputSettings*' wire keys
    # are PascalCase while the rest are camelCase — as emitted by the service swagger.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'data_store_name_parameter_assignment': {'key': 'DataStoreNameParameterAssignment', 'type': 'ParameterAssignment'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'data_store_mode_parameter_assignment': {'key': 'DataStoreModeParameterAssignment', 'type': 'ParameterAssignment'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'path_on_compute_parameter_assignment': {'key': 'PathOnComputeParameterAssignment', 'type': 'ParameterAssignment'},
        'overwrite': {'key': 'overwrite', 'type': 'bool'},
        'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
        'web_service_port': {'key': 'webServicePort', 'type': 'str'},
        'dataset_registration': {'key': 'datasetRegistration', 'type': 'DatasetRegistration'},
        'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'DatasetOutputOptions'},
        'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AssetOutputSettings'},
        'parameter_name': {'key': 'parameterName', 'type': 'str'},
        'asset_output_settings_parameter_name': {'key': 'AssetOutputSettingsParameterName', 'type': 'str'},
    }

    # All fields are optional keyword arguments; unset fields serialize as absent.
    def __init__(
        self,
        *,
        name: Optional[str] = None,
        data_store_name: Optional[str] = None,
        data_store_name_parameter_assignment: Optional["ParameterAssignment"] = None,
        data_store_mode: Optional[Union[str, "AEVADataStoreMode"]] = None,
        data_store_mode_parameter_assignment: Optional["ParameterAssignment"] = None,
        path_on_compute: Optional[str] = None,
        path_on_compute_parameter_assignment: Optional["ParameterAssignment"] = None,
        overwrite: Optional[bool] = None,
        data_reference_name: Optional[str] = None,
        web_service_port: Optional[str] = None,
        dataset_registration: Optional["DatasetRegistration"] = None,
        dataset_output_options: Optional["DatasetOutputOptions"] = None,
        asset_output_settings: Optional["AssetOutputSettings"] = None,
        parameter_name: Optional[str] = None,
        asset_output_settings_parameter_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword data_store_name_parameter_assignment:
        :paramtype data_store_name_parameter_assignment: ~flow.models.ParameterAssignment
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
        :keyword data_store_mode_parameter_assignment:
        :paramtype data_store_mode_parameter_assignment: ~flow.models.ParameterAssignment
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword path_on_compute_parameter_assignment:
        :paramtype path_on_compute_parameter_assignment: ~flow.models.ParameterAssignment
        :keyword overwrite:
        :paramtype overwrite: bool
        :keyword data_reference_name:
        :paramtype data_reference_name: str
        :keyword web_service_port:
        :paramtype web_service_port: str
        :keyword dataset_registration:
        :paramtype dataset_registration: ~flow.models.DatasetRegistration
        :keyword dataset_output_options:
        :paramtype dataset_output_options: ~flow.models.DatasetOutputOptions
        :keyword asset_output_settings:
        :paramtype asset_output_settings: ~flow.models.AssetOutputSettings
        :keyword parameter_name:
        :paramtype parameter_name: str
        :keyword asset_output_settings_parameter_name:
        :paramtype asset_output_settings_parameter_name: str
        """
        super(OutputSetting, self).__init__(**kwargs)
        # Plain attribute copies; no validation is performed by generated models.
        self.name = name
        self.data_store_name = data_store_name
        self.data_store_name_parameter_assignment = data_store_name_parameter_assignment
        self.data_store_mode = data_store_mode
        self.data_store_mode_parameter_assignment = data_store_mode_parameter_assignment
        self.path_on_compute = path_on_compute
        self.path_on_compute_parameter_assignment = path_on_compute_parameter_assignment
        self.overwrite = overwrite
        self.data_reference_name = data_reference_name
        self.web_service_port = web_service_port
        self.dataset_registration = dataset_registration
        self.dataset_output_options = dataset_output_options
        self.asset_output_settings = asset_output_settings
        self.parameter_name = parameter_name
        self.asset_output_settings_parameter_name = asset_output_settings_parameter_name


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class OutputSettingSpec(msrest.serialization.Model):
    """OutputSettingSpec.
    :ivar supported_data_store_modes:
    :vartype supported_data_store_modes: list[str or ~flow.models.AEVADataStoreMode]
    :ivar default_asset_output_path:
    :vartype default_asset_output_path: str
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    _attribute_map = {
        'supported_data_store_modes': {'key': 'supportedDataStoreModes', 'type': '[str]'},
        'default_asset_output_path': {'key': 'defaultAssetOutputPath', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        supported_data_store_modes: Optional[List[Union[str, "AEVADataStoreMode"]]] = None,
        default_asset_output_path: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword supported_data_store_modes:
        :paramtype supported_data_store_modes: list[str or ~flow.models.AEVADataStoreMode]
        :keyword default_asset_output_path:
        :paramtype default_asset_output_path: str
        """
        super(OutputSettingSpec, self).__init__(**kwargs)
        self.supported_data_store_modes = supported_data_store_modes
        self.default_asset_output_path = default_asset_output_path


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class PaginatedDataInfoList(msrest.serialization.Model):
    """A paginated list of DataInfos.

    :ivar value: An array of objects of type DataInfo.
    :vartype value: list[~flow.models.DataInfo]
    :ivar continuation_token: The token used in retrieving the next page.  If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
     there are no additional pages.
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[DataInfo]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["DataInfo"]] = None,
        continuation_token: Optional[str] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: An array of objects of type DataInfo.
        :paramtype value: list[~flow.models.DataInfo]
        :keyword continuation_token: The token used in retrieving the next page.  If null, there are
         no additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.  If
         null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedDataInfoList, self).__init__(**kwargs)
        self.value = value
        self.continuation_token = continuation_token
        self.next_link = next_link


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class PaginatedModelDtoList(msrest.serialization.Model):
    """A paginated list of ModelDtos.

    :ivar value: An array of objects of type ModelDto.
    :vartype value: list[~flow.models.ModelDto]
    :ivar continuation_token: The token used in retrieving the next page.  If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
     there are no additional pages.
    :vartype next_link: str
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    _attribute_map = {
        'value': {'key': 'value', 'type': '[ModelDto]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["ModelDto"]] = None,
        continuation_token: Optional[str] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: An array of objects of type ModelDto.
        :paramtype value: list[~flow.models.ModelDto]
        :keyword continuation_token: The token used in retrieving the next page.  If null, there are
         no additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.  If
         null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedModelDtoList, self).__init__(**kwargs)
        self.value = value
        self.continuation_token = continuation_token
        self.next_link = next_link


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class PaginatedModuleDtoList(msrest.serialization.Model):
    """A paginated list of ModuleDtos.

    :ivar value: An array of objects of type ModuleDto.
    :vartype value: list[~flow.models.ModuleDto]
    :ivar continuation_token: The token used in retrieving the next page.
     If null, there are no additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
     there are no additional pages.
    :vartype next_link: str
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    _attribute_map = {
        'value': {'key': 'value', 'type': '[ModuleDto]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["ModuleDto"]] = None,
        continuation_token: Optional[str] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: An array of objects of type ModuleDto.
        :paramtype value: list[~flow.models.ModuleDto]
        :keyword continuation_token: The token used in retrieving the next page.  If null, there are
         no additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.  If
         null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedModuleDtoList, self).__init__(**kwargs)
        self.value = value
        self.continuation_token = continuation_token
        self.next_link = next_link


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class PaginatedPipelineDraftSummaryList(msrest.serialization.Model):
    """A paginated list of PipelineDraftSummarys.

    :ivar value: An array of objects of type PipelineDraftSummary.
    :vartype value: list[~flow.models.PipelineDraftSummary]
    :ivar continuation_token: The token used in retrieving the next page.  If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
     there are no additional pages.
    :vartype next_link: str
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    _attribute_map = {
        'value': {'key': 'value', 'type': '[PipelineDraftSummary]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["PipelineDraftSummary"]] = None,
        continuation_token: Optional[str] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: An array of objects of type PipelineDraftSummary.
        :paramtype value: list[~flow.models.PipelineDraftSummary]
        :keyword continuation_token: The token used in retrieving the next page.  If null, there are
         no additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.  If
         null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedPipelineDraftSummaryList, self).__init__(**kwargs)
        self.value = value
        self.continuation_token = continuation_token
        self.next_link = next_link


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class PaginatedPipelineEndpointSummaryList(msrest.serialization.Model):
    """A paginated list of PipelineEndpointSummarys.

    :ivar value: An array of objects of type PipelineEndpointSummary.
    :vartype value: list[~flow.models.PipelineEndpointSummary]
    :ivar continuation_token: The token used in retrieving the next page.  If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
     there are no additional pages.
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[PipelineEndpointSummary]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["PipelineEndpointSummary"]] = None,
        continuation_token: Optional[str] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: An array of objects of type PipelineEndpointSummary.
        :paramtype value: list[~flow.models.PipelineEndpointSummary]
        :keyword continuation_token: The token used in retrieving the next page.  If null, there are
         no additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.  If
         null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedPipelineEndpointSummaryList, self).__init__(**kwargs)
        self.value = value
        self.continuation_token = continuation_token
        self.next_link = next_link


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class PaginatedPipelineRunSummaryList(msrest.serialization.Model):
    """A paginated list of PipelineRunSummarys.

    :ivar value: An array of objects of type PipelineRunSummary.
    :vartype value: list[~flow.models.PipelineRunSummary]
    :ivar continuation_token: The token used in retrieving the next page.  If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
     there are no additional pages.
    :vartype next_link: str
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    _attribute_map = {
        'value': {'key': 'value', 'type': '[PipelineRunSummary]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["PipelineRunSummary"]] = None,
        continuation_token: Optional[str] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: An array of objects of type PipelineRunSummary.
        :paramtype value: list[~flow.models.PipelineRunSummary]
        :keyword continuation_token: The token used in retrieving the next page.  If null, there are
         no additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.  If
         null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedPipelineRunSummaryList, self).__init__(**kwargs)
        self.value = value
        self.continuation_token = continuation_token
        self.next_link = next_link


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class PaginatedPublishedPipelineSummaryList(msrest.serialization.Model):
    """A paginated list of PublishedPipelineSummarys.

    :ivar value: An array of objects of type PublishedPipelineSummary.
    :vartype value: list[~flow.models.PublishedPipelineSummary]
    :ivar continuation_token: The token used in retrieving the next page.  If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
     there are no additional pages.
    :vartype next_link: str
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    _attribute_map = {
        'value': {'key': 'value', 'type': '[PublishedPipelineSummary]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["PublishedPipelineSummary"]] = None,
        continuation_token: Optional[str] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: An array of objects of type PublishedPipelineSummary.
        :paramtype value: list[~flow.models.PublishedPipelineSummary]
        :keyword continuation_token: The token used in retrieving the next page.  If null, there are
         no additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.  If
         null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedPublishedPipelineSummaryList, self).__init__(**kwargs)
        self.value = value
        self.continuation_token = continuation_token
        self.next_link = next_link


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class ParallelForControlFlowInfo(msrest.serialization.Model):
    """ParallelForControlFlowInfo.
    :ivar parallel_for_items_input:
    :vartype parallel_for_items_input: ~flow.models.ParameterAssignment
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    _attribute_map = {
        'parallel_for_items_input': {'key': 'parallelForItemsInput', 'type': 'ParameterAssignment'},
    }

    def __init__(
        self,
        *,
        parallel_for_items_input: Optional["ParameterAssignment"] = None,
        **kwargs
    ):
        """
        :keyword parallel_for_items_input:
        :paramtype parallel_for_items_input: ~flow.models.ParameterAssignment
        """
        super(ParallelForControlFlowInfo, self).__init__(**kwargs)
        self.parallel_for_items_input = parallel_for_items_input


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class ParallelTaskConfiguration(msrest.serialization.Model):
    """ParallelTaskConfiguration.

    :ivar max_retries_per_worker:
    :vartype max_retries_per_worker: int
    :ivar worker_count_per_node:
    :vartype worker_count_per_node: int
    :ivar terminal_exit_codes:
    :vartype terminal_exit_codes: list[int]
    :ivar configuration: Dictionary of :code:`<string>`.
    :vartype configuration: dict[str, str]
    """

    _attribute_map = {
        'max_retries_per_worker': {'key': 'maxRetriesPerWorker', 'type': 'int'},
        'worker_count_per_node': {'key': 'workerCountPerNode', 'type': 'int'},
        'terminal_exit_codes': {'key': 'terminalExitCodes', 'type': '[int]'},
        'configuration': {'key': 'configuration', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        max_retries_per_worker: Optional[int] = None,
        worker_count_per_node: Optional[int] = None,
        terminal_exit_codes: Optional[List[int]] = None,
        configuration: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword max_retries_per_worker:
        :paramtype max_retries_per_worker: int
        :keyword worker_count_per_node:
        :paramtype worker_count_per_node: int
        :keyword terminal_exit_codes:
        :paramtype terminal_exit_codes: list[int]
        :keyword configuration: Dictionary of :code:`<string>`.
        :paramtype configuration: dict[str, str]
        """
        super(ParallelTaskConfiguration, self).__init__(**kwargs)
        self.max_retries_per_worker = max_retries_per_worker
        self.worker_count_per_node = worker_count_per_node
        self.terminal_exit_codes = terminal_exit_codes
        self.configuration = configuration


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class Parameter(msrest.serialization.Model):
    """Parameter.

    :ivar name:
    :vartype name: str
    :ivar documentation:
    :vartype documentation: str
    :ivar default_value:
    :vartype default_value: str
    :ivar is_optional:
    :vartype is_optional: bool
    :ivar min_max_rules:
    :vartype min_max_rules: list[~flow.models.MinMaxParameterRule]
    :ivar enum_rules:
    :vartype enum_rules: list[~flow.models.EnumParameterRule]
    :ivar type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
    :vartype type: str or ~flow.models.ParameterType
    :ivar label:
    :vartype label: str
    :ivar group_names:
    :vartype group_names: list[str]
    :ivar argument_name:
    :vartype argument_name: str
    :ivar ui_hint:
    :vartype ui_hint: ~flow.models.UIParameterHint
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'documentation': {'key': 'documentation', 'type': 'str'},
        'default_value': {'key': 'defaultValue', 'type': 'str'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
        'min_max_rules': {'key': 'minMaxRules', 'type': '[MinMaxParameterRule]'},
        'enum_rules': {'key': 'enumRules', 'type': '[EnumParameterRule]'},
        'type': {'key': 'type', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'group_names': {'key': 'groupNames', 'type': '[str]'},
        'argument_name': {'key': 'argumentName', 'type': 'str'},
        'ui_hint': {'key': 'uiHint', 'type': 'UIParameterHint'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        documentation: Optional[str] = None,
        default_value: Optional[str] = None,
        is_optional: Optional[bool] = None,
        min_max_rules: Optional[List["MinMaxParameterRule"]] = None,
        enum_rules: Optional[List["EnumParameterRule"]] = None,
        type: Optional[Union[str, "ParameterType"]] = None,
        label: Optional[str] = None,
        group_names: Optional[List[str]] = None,
        argument_name: Optional[str] = None,
        ui_hint: Optional["UIParameterHint"] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword documentation:
        :paramtype documentation: str
        :keyword default_value:
        :paramtype default_value: str
        :keyword is_optional:
        :paramtype is_optional: bool
        :keyword min_max_rules:
        :paramtype min_max_rules: list[~flow.models.MinMaxParameterRule]
        :keyword enum_rules:
        :paramtype enum_rules: list[~flow.models.EnumParameterRule]
        :keyword type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
        :paramtype type: str or ~flow.models.ParameterType
        :keyword label:
        :paramtype label: str
        :keyword group_names:
        :paramtype group_names: list[str]
        :keyword argument_name:
        :paramtype argument_name: str
        :keyword ui_hint:
        :paramtype ui_hint: ~flow.models.UIParameterHint
        """
        super(Parameter, self).__init__(**kwargs)
        # Plain attribute copies; no validation is performed by generated models.
        self.name = name
        self.documentation = documentation
        self.default_value = default_value
        self.is_optional = is_optional
        self.min_max_rules = min_max_rules
        self.enum_rules = enum_rules
        self.type = type
        self.label = label
        self.group_names = group_names
        self.argument_name = argument_name
        self.ui_hint = ui_hint


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class ParameterAssignment(msrest.serialization.Model):
    """ParameterAssignment.

    :ivar value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
     "Input", "DataPath", "DataSetDefinition".
    :vartype value_type: str or ~flow.models.ParameterValueType
    :ivar assignments_to_concatenate:
    :vartype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
    :ivar data_path_assignment:
    :vartype data_path_assignment: ~flow.models.LegacyDataPath
    :ivar data_set_definition_value_assignment:
    :vartype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
    :ivar name:
    :vartype name: str
    :ivar value:
    :vartype value: str
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    # Note the recursive 'ParameterAssignment' element type for concatenation.
    _attribute_map = {
        'value_type': {'key': 'valueType', 'type': 'str'},
        'assignments_to_concatenate': {'key': 'assignmentsToConcatenate', 'type': '[ParameterAssignment]'},
        'data_path_assignment': {'key': 'dataPathAssignment', 'type': 'LegacyDataPath'},
        'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': 'DataSetDefinitionValue'},
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value_type: Optional[Union[str, "ParameterValueType"]] = None,
        assignments_to_concatenate: Optional[List["ParameterAssignment"]] = None,
        data_path_assignment: Optional["LegacyDataPath"] = None,
        data_set_definition_value_assignment: Optional["DataSetDefinitionValue"] = None,
        name: Optional[str] = None,
        value: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
         "Input", "DataPath", "DataSetDefinition".
        :paramtype value_type: str or ~flow.models.ParameterValueType
        :keyword assignments_to_concatenate:
        :paramtype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
        :keyword data_path_assignment:
        :paramtype data_path_assignment: ~flow.models.LegacyDataPath
        :keyword data_set_definition_value_assignment:
        :paramtype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
        :keyword name:
        :paramtype name: str
        :keyword value:
        :paramtype value: str
        """
        super(ParameterAssignment, self).__init__(**kwargs)
        self.value_type = value_type
        self.assignments_to_concatenate = assignments_to_concatenate
        self.data_path_assignment = data_path_assignment
        self.data_set_definition_value_assignment = data_set_definition_value_assignment
        self.name = name
        self.value = value


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class ParameterDefinition(msrest.serialization.Model):
    """ParameterDefinition.

    :ivar name:
    :vartype name: str
    :ivar type:
    :vartype type: str
    :ivar value:
    :vartype value: str
    :ivar is_optional:
    :vartype is_optional: bool
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        type: Optional[str] = None,
        value: Optional[str] = None,
        is_optional: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword type:
        :paramtype type: str
        :keyword value:
        :paramtype value: str
        :keyword is_optional:
        :paramtype is_optional: bool
        """
        super(ParameterDefinition, self).__init__(**kwargs)
        self.name = name
        self.type = type
        self.value = value
        self.is_optional = is_optional


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class PatchFlowRequest(msrest.serialization.Model):
    """PatchFlowRequest.

    :ivar flow_patch_operation_type: Possible values include: "ArchiveFlow", "RestoreFlow",
     "ExportFlowToFile".
    :vartype flow_patch_operation_type: str or ~flow.models.FlowPatchOperationType
    :ivar flow_definition_file_path:
    :vartype flow_definition_file_path: str
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    _attribute_map = {
        'flow_patch_operation_type': {'key': 'flowPatchOperationType', 'type': 'str'},
        'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        flow_patch_operation_type: Optional[Union[str, "FlowPatchOperationType"]] = None,
        flow_definition_file_path: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword flow_patch_operation_type: Possible values include: "ArchiveFlow", "RestoreFlow",
         "ExportFlowToFile".
        :paramtype flow_patch_operation_type: str or ~flow.models.FlowPatchOperationType
        :keyword flow_definition_file_path:
        :paramtype flow_definition_file_path: str
        """
        super(PatchFlowRequest, self).__init__(**kwargs)
        self.flow_patch_operation_type = flow_patch_operation_type
        self.flow_definition_file_path = flow_definition_file_path


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class Pipeline(msrest.serialization.Model):
    """Pipeline.

    :ivar run_id:
    :vartype run_id: str
    :ivar continue_run_on_step_failure:
    :vartype continue_run_on_step_failure: bool
    :ivar default_datastore_name:
    :vartype default_datastore_name: str
    :ivar component_jobs: This is a dictionary.
    :vartype component_jobs: dict[str, ~flow.models.ComponentJob]
    :ivar inputs: This is a dictionary.
    :vartype inputs: dict[str, ~flow.models.PipelineInput]
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, ~flow.models.PipelineOutput]
    """

    # msrest (de)serialization map: python attribute -> REST wire key and type.
    _attribute_map = {
        'run_id': {'key': 'runId', 'type': 'str'},
        'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
        'default_datastore_name': {'key': 'defaultDatastoreName', 'type': 'str'},
        'component_jobs': {'key': 'componentJobs', 'type': '{ComponentJob}'},
        'inputs': {'key': 'inputs', 'type': '{PipelineInput}'},
        'outputs': {'key': 'outputs', 'type': '{PipelineOutput}'},
    }

    def __init__(
        self,
        *,
        run_id: Optional[str] = None,
        continue_run_on_step_failure: Optional[bool] = None,
        default_datastore_name: Optional[str] = None,
        component_jobs: Optional[Dict[str, "ComponentJob"]] = None,
        inputs: Optional[Dict[str, "PipelineInput"]] = None,
        outputs: Optional[Dict[str, "PipelineOutput"]] = None,
        **kwargs
    ):
        """
        :keyword run_id:
        :paramtype run_id: str
        :keyword continue_run_on_step_failure:
        :paramtype continue_run_on_step_failure: bool
        :keyword default_datastore_name:
        :paramtype default_datastore_name: str
        :keyword component_jobs: This is a dictionary.
        :paramtype component_jobs: dict[str, ~flow.models.ComponentJob]
        :keyword inputs: This is a dictionary.
        :paramtype inputs: dict[str, ~flow.models.PipelineInput]
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, ~flow.models.PipelineOutput]
        """
        super(Pipeline, self).__init__(**kwargs)
        self.run_id = run_id
        self.continue_run_on_step_failure = continue_run_on_step_failure
        self.default_datastore_name = default_datastore_name
        self.component_jobs = component_jobs
        self.inputs = inputs
        self.outputs = outputs


# NOTE: auto-generated by AutoRest; manual edits are lost on regeneration.
class PipelineDraft(msrest.serialization.Model):
    """PipelineDraft.
    :ivar graph_draft_id:
    :vartype graph_draft_id: str
    :ivar source_pipeline_run_id:
    :vartype source_pipeline_run_id: str
    :ivar latest_pipeline_run_id:
    :vartype latest_pipeline_run_id: str
    :ivar latest_run_experiment_name:
    :vartype latest_run_experiment_name: str
    :ivar latest_run_experiment_id:
    :vartype latest_run_experiment_id: str
    :ivar is_latest_run_experiment_archived:
    :vartype is_latest_run_experiment_archived: bool
    :ivar status:
    :vartype status: ~flow.models.PipelineStatus
    :ivar graph_detail:
    :vartype graph_detail: ~flow.models.PipelineRunGraphDetail
    :ivar real_time_endpoint_info:
    :vartype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
    :ivar linked_pipelines_info:
    :vartype linked_pipelines_info: list[~flow.models.LinkedPipelineInfo]
    :ivar nodes_in_draft:
    :vartype nodes_in_draft: list[str]
    :ivar studio_migration_info:
    :vartype studio_migration_info: ~flow.models.StudioMigrationInfo
    :ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
    :vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
    :ivar pipeline_run_setting_parameters:
    :vartype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
    :ivar pipeline_run_settings:
    :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
    :ivar continue_run_on_step_failure:
    :vartype continue_run_on_step_failure: bool
    :ivar continue_run_on_failed_optional_input:
    :vartype continue_run_on_failed_optional_input: bool
    :ivar default_compute:
    :vartype default_compute: ~flow.models.ComputeSetting
    :ivar default_datastore:
    :vartype default_datastore: ~flow.models.DatastoreSetting
    :ivar default_cloud_priority:
    :vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
    :ivar enforce_rerun:
    :vartype enforce_rerun: bool
    :ivar pipeline_parameters: This is a dictionary.
    :vartype pipeline_parameters: dict[str, str]
    :ivar data_path_assignments: This is a dictionary.
    :vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
    :ivar data_set_definition_value_assignments: This is a dictionary.
    :vartype data_set_definition_value_assignments: dict[str,
     ~flow.models.DataSetDefinitionValue]
    :ivar asset_output_settings_assignments: This is a dictionary.
    :vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
    :ivar pipeline_timeout:
    :vartype pipeline_timeout: int
    :ivar identity_config:
    :vartype identity_config: ~flow.models.IdentitySetting
    :ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
     "ContainsDesignerBuildin".
    :vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
    :ivar name:
    :vartype name: str
    :ivar last_edited_by:
    :vartype last_edited_by: str
    :ivar created_by:
    :vartype created_by: str
    :ivar description:
    :vartype description: str
    :ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
     "BatchInferencePipeline", "Unknown".
    :vartype pipeline_type: str or ~flow.models.PipelineType
    :ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
    :vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Wire-format map: attribute -> REST JSON key and msrest type string
    # ('[X]' = list of X, '{X}' = dict of X, 'iso-8601' = datetime).
    _attribute_map = {
        'graph_draft_id': {'key': 'graphDraftId', 'type': 'str'},
        'source_pipeline_run_id': {'key': 'sourcePipelineRunId', 'type': 'str'},
        'latest_pipeline_run_id': {'key': 'latestPipelineRunId', 'type': 'str'},
        'latest_run_experiment_name': {'key': 'latestRunExperimentName', 'type': 'str'},
        'latest_run_experiment_id': {'key': 'latestRunExperimentId', 'type': 'str'},
        'is_latest_run_experiment_archived': {'key': 'isLatestRunExperimentArchived', 'type': 'bool'},
        'status': {'key': 'status', 'type': 'PipelineStatus'},
        'graph_detail': {'key': 'graphDetail', 'type': 'PipelineRunGraphDetail'},
        'real_time_endpoint_info': {'key': 'realTimeEndpointInfo', 'type': 'RealTimeEndpointInfo'},
        'linked_pipelines_info': {'key': 'linkedPipelinesInfo', 'type': '[LinkedPipelineInfo]'},
        'nodes_in_draft': {'key': 'nodesInDraft', 'type': '[str]'},
        'studio_migration_info': {'key': 'studioMigrationInfo', 'type': 'StudioMigrationInfo'},
        'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
        'pipeline_run_setting_parameters': {'key': 'pipelineRunSettingParameters', 'type': '[RunSettingParameter]'},
        'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
        'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
        'continue_run_on_failed_optional_input': {'key': 'continueRunOnFailedOptionalInput', 'type': 'bool'},
        'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
        'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
        'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
        'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
        'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
        'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
        'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
        'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
        'pipeline_timeout': {'key': 'pipelineTimeout', 'type': 'int'},
        'identity_config': {'key': 'identityConfig', 'type': 'IdentitySetting'},
        'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'last_edited_by': {'key': 'lastEditedBy', 'type': 'str'},
        'created_by': {'key': 'createdBy', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
        'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        graph_draft_id: Optional[str] = None,
        source_pipeline_run_id: Optional[str] = None,
        latest_pipeline_run_id: Optional[str] = None,
        latest_run_experiment_name: Optional[str] = None,
        latest_run_experiment_id: Optional[str] = None,
        is_latest_run_experiment_archived: Optional[bool] = None,
        status: Optional["PipelineStatus"] = None,
        graph_detail: Optional["PipelineRunGraphDetail"] = None,
        real_time_endpoint_info: Optional["RealTimeEndpointInfo"] = None,
        linked_pipelines_info: Optional[List["LinkedPipelineInfo"]] = None,
        nodes_in_draft: Optional[List[str]] = None,
        studio_migration_info: Optional["StudioMigrationInfo"] = None,
        flattened_sub_graphs: Optional[Dict[str, "PipelineSubDraft"]] = None,
        pipeline_run_setting_parameters: Optional[List["RunSettingParameter"]] = None,
        pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
        continue_run_on_step_failure: Optional[bool] = None,
        continue_run_on_failed_optional_input: Optional[bool] = None,
        default_compute: Optional["ComputeSetting"] = None,
        default_datastore: Optional["DatastoreSetting"] = None,
        default_cloud_priority: Optional["CloudPrioritySetting"] = None,
        enforce_rerun: Optional[bool] = None,
        pipeline_parameters: Optional[Dict[str, str]] = None,
        data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None,
        data_set_definition_value_assignments: Optional[Dict[str, "DataSetDefinitionValue"]] = None,
        asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None,
        pipeline_timeout: Optional[int] = None,
        identity_config: Optional["IdentitySetting"] = None,
        graph_components_mode: Optional[Union[str, "GraphComponentsMode"]] = None,
        name: Optional[str] = None,
        last_edited_by: Optional[str] = None,
        created_by: Optional[str] = None,
        description: Optional[str] = None,
        pipeline_type: Optional[Union[str, "PipelineType"]] = None,
        pipeline_draft_mode: Optional[Union[str, "PipelineDraftMode"]] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        entity_status: Optional[Union[str, "EntityStatus"]] = None,
        id: Optional[str] = None,
        etag: Optional[str] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        **kwargs
    ):
        """
        :keyword graph_draft_id:
        :paramtype graph_draft_id: str
        :keyword source_pipeline_run_id:
        :paramtype source_pipeline_run_id: str
        :keyword latest_pipeline_run_id:
        :paramtype latest_pipeline_run_id: str
        :keyword latest_run_experiment_name:
        :paramtype latest_run_experiment_name: str
        :keyword latest_run_experiment_id:
        :paramtype latest_run_experiment_id: str
        :keyword is_latest_run_experiment_archived:
        :paramtype is_latest_run_experiment_archived: bool
        :keyword status:
        :paramtype status: ~flow.models.PipelineStatus
        :keyword graph_detail:
        :paramtype graph_detail: ~flow.models.PipelineRunGraphDetail
        :keyword real_time_endpoint_info:
        :paramtype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
        :keyword linked_pipelines_info:
        :paramtype linked_pipelines_info: list[~flow.models.LinkedPipelineInfo]
        :keyword nodes_in_draft:
        :paramtype nodes_in_draft: list[str]
        :keyword studio_migration_info:
        :paramtype studio_migration_info: ~flow.models.StudioMigrationInfo
        :keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
        :paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
        :keyword pipeline_run_setting_parameters:
        :paramtype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
        :keyword pipeline_run_settings:
        :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
        :keyword continue_run_on_step_failure:
        :paramtype continue_run_on_step_failure: bool
        :keyword continue_run_on_failed_optional_input:
        :paramtype continue_run_on_failed_optional_input: bool
        :keyword default_compute:
        :paramtype default_compute: ~flow.models.ComputeSetting
        :keyword default_datastore:
        :paramtype default_datastore: ~flow.models.DatastoreSetting
        :keyword default_cloud_priority:
        :paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
        :keyword enforce_rerun:
        :paramtype enforce_rerun: bool
        :keyword pipeline_parameters: This is a dictionary.
        :paramtype pipeline_parameters: dict[str, str]
        :keyword data_path_assignments: This is a dictionary.
        :paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
        :keyword data_set_definition_value_assignments: This is a dictionary.
        :paramtype data_set_definition_value_assignments: dict[str,
         ~flow.models.DataSetDefinitionValue]
        :keyword asset_output_settings_assignments: This is a dictionary.
        :paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
        :keyword pipeline_timeout:
        :paramtype pipeline_timeout: int
        :keyword identity_config:
        :paramtype identity_config: ~flow.models.IdentitySetting
        :keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
         "ContainsDesignerBuildin".
        :paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
        :keyword name:
        :paramtype name: str
        :keyword last_edited_by:
        :paramtype last_edited_by: str
        :keyword created_by:
        :paramtype created_by: str
        :keyword description:
        :paramtype description: str
        :keyword pipeline_type: Possible values include: "TrainingPipeline",
         "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
        :paramtype pipeline_type: str or ~flow.models.PipelineType
        :keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
        :paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PipelineDraft, self).__init__(**kwargs)
        self.graph_draft_id = graph_draft_id
        self.source_pipeline_run_id = source_pipeline_run_id
        self.latest_pipeline_run_id = latest_pipeline_run_id
        self.latest_run_experiment_name = latest_run_experiment_name
        self.latest_run_experiment_id = latest_run_experiment_id
        self.is_latest_run_experiment_archived = is_latest_run_experiment_archived
        self.status = status
        self.graph_detail = graph_detail
        self.real_time_endpoint_info = real_time_endpoint_info
        self.linked_pipelines_info = linked_pipelines_info
        self.nodes_in_draft = nodes_in_draft
        self.studio_migration_info = studio_migration_info
        self.flattened_sub_graphs = flattened_sub_graphs
        self.pipeline_run_setting_parameters = pipeline_run_setting_parameters
        self.pipeline_run_settings = pipeline_run_settings
        self.continue_run_on_step_failure = continue_run_on_step_failure
        self.continue_run_on_failed_optional_input = continue_run_on_failed_optional_input
        self.default_compute = default_compute
        self.default_datastore = default_datastore
        self.default_cloud_priority = default_cloud_priority
        self.enforce_rerun = enforce_rerun
        self.pipeline_parameters = pipeline_parameters
        self.data_path_assignments = data_path_assignments
        self.data_set_definition_value_assignments = data_set_definition_value_assignments
        self.asset_output_settings_assignments = asset_output_settings_assignments
        self.pipeline_timeout = pipeline_timeout
        self.identity_config = identity_config
        self.graph_components_mode = graph_components_mode
        self.name = name
        self.last_edited_by = last_edited_by
        self.created_by = created_by
        self.description = description
        self.pipeline_type = pipeline_type
        self.pipeline_draft_mode = pipeline_draft_mode
        self.tags = tags
        self.properties = properties
        self.entity_status = entity_status
        self.id = id
        self.etag = etag
        self.created_date = created_date
        self.last_modified_date = last_modified_date


class PipelineDraftStepDetails(msrest.serialization.Model):
    """PipelineDraftStepDetails.

    :ivar run_id:
    :vartype run_id: str
    :ivar target:
    :vartype target: str
    :ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
     "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
     "Failed", "Canceled".
    :vartype status: str or ~flow.models.RunStatus
    :ivar status_detail:
    :vartype status_detail: str
    :ivar parent_run_id:
    :vartype parent_run_id: str
    :ivar start_time:
    :vartype start_time: ~datetime.datetime
    :ivar end_time:
    :vartype end_time: ~datetime.datetime
    :ivar is_reused:
    :vartype is_reused: bool
    :ivar reused_run_id:
    :vartype reused_run_id: str
    :ivar reused_pipeline_run_id:
    :vartype reused_pipeline_run_id: str
    :ivar logs: This is a dictionary.
    :vartype logs: dict[str, str]
    :ivar output_log:
    :vartype output_log: str
    :ivar run_configuration:
    :vartype run_configuration: ~flow.models.RunConfiguration
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, str]
    :ivar port_outputs: This is a dictionary.
    :vartype port_outputs: dict[str, ~flow.models.PortOutputInfo]
    :ivar is_experiment_archived:
    :vartype is_experiment_archived: bool
    """

    # Wire-format map: attribute -> REST JSON key and msrest type string
    # ('{X}' = dict of X, 'iso-8601' = datetime).
    _attribute_map = {
        'run_id': {'key': 'runId', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'is_reused': {'key': 'isReused', 'type': 'bool'},
        'reused_run_id': {'key': 'reusedRunId', 'type': 'str'},
        'reused_pipeline_run_id': {'key': 'reusedPipelineRunId', 'type': 'str'},
        'logs': {'key': 'logs', 'type': '{str}'},
        'output_log': {'key': 'outputLog', 'type': 'str'},
        'run_configuration': {'key': 'runConfiguration', 'type': 'RunConfiguration'},
        'outputs': {'key': 'outputs', 'type': '{str}'},
        'port_outputs': {'key': 'portOutputs', 'type': '{PortOutputInfo}'},
        'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        run_id: Optional[str] = None,
        target: Optional[str] = None,
        status: Optional[Union[str, "RunStatus"]] = None,
        status_detail: Optional[str] = None,
        parent_run_id: Optional[str] = None,
        start_time: Optional[datetime.datetime] = None,
        end_time: Optional[datetime.datetime] = None,
        is_reused: Optional[bool] = None,
        reused_run_id: Optional[str] = None,
        reused_pipeline_run_id: Optional[str] = None,
        logs: Optional[Dict[str, str]] = None,
        output_log: Optional[str] = None,
        run_configuration: Optional["RunConfiguration"] = None,
        outputs: Optional[Dict[str, str]] = None,
        port_outputs: Optional[Dict[str, "PortOutputInfo"]] = None,
        is_experiment_archived: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword run_id:
        :paramtype run_id: str
        :keyword target:
        :paramtype target: str
        :keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
         "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested",
         "Completed", "Failed", "Canceled".
        :paramtype status: str or ~flow.models.RunStatus
        :keyword status_detail:
        :paramtype status_detail: str
        :keyword parent_run_id:
        :paramtype parent_run_id: str
        :keyword start_time:
        :paramtype start_time: ~datetime.datetime
        :keyword end_time:
        :paramtype end_time: ~datetime.datetime
        :keyword is_reused:
        :paramtype is_reused: bool
        :keyword reused_run_id:
        :paramtype reused_run_id: str
        :keyword reused_pipeline_run_id:
        :paramtype reused_pipeline_run_id: str
        :keyword logs: This is a dictionary.
        :paramtype logs: dict[str, str]
        :keyword output_log:
        :paramtype output_log: str
        :keyword run_configuration:
        :paramtype run_configuration: ~flow.models.RunConfiguration
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, str]
        :keyword port_outputs: This is a dictionary.
        :paramtype port_outputs: dict[str, ~flow.models.PortOutputInfo]
        :keyword is_experiment_archived:
        :paramtype is_experiment_archived: bool
        """
        super(PipelineDraftStepDetails, self).__init__(**kwargs)
        self.run_id = run_id
        self.target = target
        self.status = status
        self.status_detail = status_detail
        self.parent_run_id = parent_run_id
        self.start_time = start_time
        self.end_time = end_time
        self.is_reused = is_reused
        self.reused_run_id = reused_run_id
        self.reused_pipeline_run_id = reused_pipeline_run_id
        self.logs = logs
        self.output_log = output_log
        self.run_configuration = run_configuration
        self.outputs = outputs
        self.port_outputs = port_outputs
        self.is_experiment_archived = is_experiment_archived


class PipelineDraftSummary(msrest.serialization.Model):
    """PipelineDraftSummary.

    :ivar name:
    :vartype name: str
    :ivar last_edited_by:
    :vartype last_edited_by: str
    :ivar created_by:
    :vartype created_by: str
    :ivar description:
    :vartype description: str
    :ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
     "BatchInferencePipeline", "Unknown".
    :vartype pipeline_type: str or ~flow.models.PipelineType
    :ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
    :vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Wire-format map: attribute -> REST JSON key and msrest type string.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'last_edited_by': {'key': 'lastEditedBy', 'type': 'str'},
        'created_by': {'key': 'createdBy', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
        'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        last_edited_by: Optional[str] = None,
        created_by: Optional[str] = None,
        description: Optional[str] = None,
        pipeline_type: Optional[Union[str, "PipelineType"]] = None,
        pipeline_draft_mode: Optional[Union[str, "PipelineDraftMode"]] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        entity_status: Optional[Union[str, "EntityStatus"]] = None,
        id: Optional[str] = None,
        etag: Optional[str] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword last_edited_by:
        :paramtype last_edited_by: str
        :keyword created_by:
        :paramtype created_by: str
        :keyword description:
        :paramtype description: str
        :keyword pipeline_type: Possible values include: "TrainingPipeline",
         "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
        :paramtype pipeline_type: str or ~flow.models.PipelineType
        :keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
        :paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PipelineDraftSummary, self).__init__(**kwargs)
        self.name = name
        self.last_edited_by = last_edited_by
        self.created_by = created_by
        self.description = description
        self.pipeline_type = pipeline_type
        self.pipeline_draft_mode = pipeline_draft_mode
        self.tags = tags
        self.properties = properties
        self.entity_status = entity_status
        self.id = id
        self.etag = etag
        self.created_date = created_date
        self.last_modified_date = last_modified_date


class PipelineEndpoint(msrest.serialization.Model):
    """PipelineEndpoint.

    :ivar default_version:
    :vartype default_version: str
    :ivar default_pipeline_id:
    :vartype default_pipeline_id: str
    :ivar default_graph_id:
    :vartype default_graph_id: str
    :ivar rest_endpoint:
    :vartype rest_endpoint: str
    :ivar published_date:
    :vartype published_date: ~datetime.datetime
    :ivar published_by:
    :vartype published_by: str
    :ivar parameters: This is a dictionary.
    :vartype parameters: dict[str, str]
    :ivar data_set_definition_value_assignment: This is a dictionary.
    :vartype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
    :ivar default_pipeline_name:
    :vartype default_pipeline_name: str
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar updated_by:
    :vartype updated_by: str
    :ivar swagger_url:
    :vartype swagger_url: str
    :ivar last_run_time:
    :vartype last_run_time: ~datetime.datetime
    :ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
     "Canceled", "Queued", "CancelRequested".
    :vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Wire-format map: attribute -> REST JSON key and msrest type string
    # ('{X}' = dict of X, 'iso-8601' = datetime).
    _attribute_map = {
        'default_version': {'key': 'defaultVersion', 'type': 'str'},
        'default_pipeline_id': {'key': 'defaultPipelineId', 'type': 'str'},
        'default_graph_id': {'key': 'defaultGraphId', 'type': 'str'},
        'rest_endpoint': {'key': 'restEndpoint', 'type': 'str'},
        'published_date': {'key': 'publishedDate', 'type': 'iso-8601'},
        'published_by': {'key': 'publishedBy', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{str}'},
        'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': '{DataSetDefinitionValue}'},
        'default_pipeline_name': {'key': 'defaultPipelineName', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'updated_by': {'key': 'updatedBy', 'type': 'str'},
        'swagger_url': {'key': 'swaggerUrl', 'type': 'str'},
        'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
        'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        default_version: Optional[str] = None,
        default_pipeline_id: Optional[str] = None,
        default_graph_id: Optional[str] = None,
        rest_endpoint: Optional[str] = None,
        published_date: Optional[datetime.datetime] = None,
        published_by: Optional[str] = None,
        parameters: Optional[Dict[str, str]] = None,
        data_set_definition_value_assignment: Optional[Dict[str, "DataSetDefinitionValue"]] = None,
        default_pipeline_name: Optional[str] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        updated_by: Optional[str] = None,
        swagger_url: Optional[str] = None,
        last_run_time: Optional[datetime.datetime] = None,
        last_run_status: Optional[Union[str, "PipelineRunStatusCode"]] = None,
        tags: Optional[Dict[str, str]] = None,
        entity_status: Optional[Union[str, "EntityStatus"]] = None,
        id: Optional[str] = None,
        etag: Optional[str] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        **kwargs
    ):
        """
        :keyword default_version:
        :paramtype default_version: str
        :keyword default_pipeline_id:
        :paramtype default_pipeline_id: str
        :keyword default_graph_id:
        :paramtype default_graph_id: str
        :keyword rest_endpoint:
        :paramtype rest_endpoint: str
        :keyword published_date:
        :paramtype published_date: ~datetime.datetime
        :keyword published_by:
        :paramtype published_by: str
        :keyword parameters: This is a dictionary.
        :paramtype parameters: dict[str, str]
        :keyword data_set_definition_value_assignment: This is a dictionary.
        :paramtype data_set_definition_value_assignment: dict[str,
         ~flow.models.DataSetDefinitionValue]
        :keyword default_pipeline_name:
        :paramtype default_pipeline_name: str
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword updated_by:
        :paramtype updated_by: str
        :keyword swagger_url:
        :paramtype swagger_url: str
        :keyword last_run_time:
        :paramtype last_run_time: ~datetime.datetime
        :keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
         "Finished", "Canceled", "Queued", "CancelRequested".
        :paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PipelineEndpoint, self).__init__(**kwargs)
        self.default_version = default_version
        self.default_pipeline_id = default_pipeline_id
        self.default_graph_id = default_graph_id
        self.rest_endpoint = rest_endpoint
        self.published_date = published_date
        self.published_by = published_by
        self.parameters = parameters
        self.data_set_definition_value_assignment = data_set_definition_value_assignment
        self.default_pipeline_name = default_pipeline_name
        self.name = name
        self.description = description
        self.updated_by = updated_by
        self.swagger_url = swagger_url
        self.last_run_time = last_run_time
        self.last_run_status = last_run_status
        self.tags = tags
        self.entity_status = entity_status
        self.id = id
        self.etag = etag
        self.created_date = created_date
        self.last_modified_date = last_modified_date


class PipelineEndpointSummary(msrest.serialization.Model):
    """PipelineEndpointSummary.
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar updated_by:
    :vartype updated_by: str
    :ivar swagger_url:
    :vartype swagger_url: str
    :ivar last_run_time:
    :vartype last_run_time: ~datetime.datetime
    :ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
     "Canceled", "Queued", "CancelRequested".
    :vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Wire-format map: attribute -> REST JSON key and msrest type string.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'updated_by': {'key': 'updatedBy', 'type': 'str'},
        'swagger_url': {'key': 'swaggerUrl', 'type': 'str'},
        'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
        'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        description: Optional[str] = None,
        updated_by: Optional[str] = None,
        swagger_url: Optional[str] = None,
        last_run_time: Optional[datetime.datetime] = None,
        last_run_status: Optional[Union[str, "PipelineRunStatusCode"]] = None,
        tags: Optional[Dict[str, str]] = None,
        entity_status: Optional[Union[str, "EntityStatus"]] = None,
        id: Optional[str] = None,
        etag: Optional[str] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword updated_by:
        :paramtype updated_by: str
        :keyword swagger_url:
        :paramtype swagger_url: str
        :keyword last_run_time:
        :paramtype last_run_time: ~datetime.datetime
        :keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
         "Finished", "Canceled", "Queued", "CancelRequested".
        :paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PipelineEndpointSummary, self).__init__(**kwargs)
        self.name = name
        self.description = description
        self.updated_by = updated_by
        self.swagger_url = swagger_url
        self.last_run_time = last_run_time
        self.last_run_status = last_run_status
        self.tags = tags
        self.entity_status = entity_status
        self.id = id
        self.etag = etag
        self.created_date = created_date
        self.last_modified_date = last_modified_date


class PipelineGraph(msrest.serialization.Model):
    """PipelineGraph.

    :ivar graph_module_dtos:
    :vartype graph_module_dtos: list[~flow.models.ModuleDto]
    :ivar graph_data_sources:
    :vartype graph_data_sources: list[~flow.models.DataInfo]
    :ivar graphs: This is a dictionary.
    :vartype graphs: dict[str, ~flow.models.PipelineGraph]
    :ivar graph_drafts: This is a dictionary.
    :vartype graph_drafts: dict[str, ~flow.models.PipelineGraph]
    :ivar module_node_run_settings:
    :vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
    :ivar module_node_ui_input_settings:
    :vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
    :ivar sub_pipelines_info:
    :vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
    :ivar referenced_node_id:
    :vartype referenced_node_id: str
    :ivar pipeline_run_setting_parameters:
    :vartype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
    :ivar pipeline_run_settings:
    :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
    :ivar real_time_endpoint_info:
    :vartype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
    :ivar node_telemetry_meta_infos:
    :vartype node_telemetry_meta_infos: list[~flow.models.NodeTelemetryMetaInfo]
    :ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
     "ContainsDesignerBuildin".
    :vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
    :ivar module_nodes:
    :vartype module_nodes: list[~flow.models.GraphModuleNode]
    :ivar dataset_nodes:
    :vartype dataset_nodes: list[~flow.models.GraphDatasetNode]
    :ivar sub_graph_nodes:
    :vartype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
    :ivar control_reference_nodes:
    :vartype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
    :ivar control_nodes:
    :vartype control_nodes: list[~flow.models.GraphControlNode]
    :ivar edges:
    :vartype edges: list[~flow.models.GraphEdge]
    :ivar entity_interface:
    :vartype entity_interface: ~flow.models.EntityInterface
    :ivar graph_layout:
    :vartype graph_layout: ~flow.models.GraphLayout
    :ivar created_by:
    :vartype created_by: ~flow.models.CreatedBy
    :ivar last_updated_by:
    :vartype last_updated_by: ~flow.models.CreatedBy
    :ivar default_compute:
    :vartype default_compute: ~flow.models.ComputeSetting
    :ivar default_datastore:
    :vartype default_datastore: ~flow.models.DatastoreSetting
    :ivar default_cloud_priority:
    :vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
    :ivar extended_properties: This is a dictionary.
    :vartype extended_properties: dict[str, str]
    :ivar parent_sub_graph_module_ids:
    :vartype parent_sub_graph_module_ids: list[str]
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # Wire-format map: attribute -> REST JSON key and msrest type string
    # ('[X]' = list of X, '{X}' = dict of X, 'iso-8601' = datetime).
    _attribute_map = {
        'graph_module_dtos': {'key': 'graphModuleDtos', 'type': '[ModuleDto]'},
        'graph_data_sources': {'key': 'graphDataSources', 'type': '[DataInfo]'},
        'graphs': {'key': 'graphs', 'type': '{PipelineGraph}'},
        'graph_drafts': {'key': 'graphDrafts', 'type': '{PipelineGraph}'},
        'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
        'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
        'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
        'referenced_node_id': {'key': 'referencedNodeId', 'type': 'str'},
        'pipeline_run_setting_parameters': {'key': 'pipelineRunSettingParameters', 'type': '[RunSettingParameter]'},
        'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
        'real_time_endpoint_info': {'key': 'realTimeEndpointInfo', 'type': 'RealTimeEndpointInfo'},
        'node_telemetry_meta_infos': {'key': 'nodeTelemetryMetaInfos', 'type': '[NodeTelemetryMetaInfo]'},
        'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
        'module_nodes': {'key': 'moduleNodes', 'type': '[GraphModuleNode]'},
        'dataset_nodes': {'key': 'datasetNodes', 'type': '[GraphDatasetNode]'},
        'sub_graph_nodes': {'key': 'subGraphNodes', 'type': '[GraphReferenceNode]'},
        'control_reference_nodes': {'key': 'controlReferenceNodes', 'type': '[GraphControlReferenceNode]'},
        'control_nodes': {'key': 'controlNodes', 'type': '[GraphControlNode]'},
        'edges': {'key': 'edges', 'type': '[GraphEdge]'},
        'entity_interface': {'key': 'entityInterface', 'type': 'EntityInterface'},
        'graph_layout': {'key': 'graphLayout', 'type': 'GraphLayout'},
        'created_by': {'key': 'createdBy', 'type': 'CreatedBy'},
        'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'CreatedBy'},
        'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
        'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
        'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
        'extended_properties': {'key': 'extendedProperties', 'type': '{str}'},
        'parent_sub_graph_module_ids': {'key': 'parentSubGraphModuleIds', 'type': '[str]'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        graph_module_dtos: Optional[List["ModuleDto"]] = None,
        graph_data_sources: Optional[List["DataInfo"]] = None,
        graphs: Optional[Dict[str, "PipelineGraph"]] = None,
        graph_drafts: Optional[Dict[str, "PipelineGraph"]] = None,
        module_node_run_settings: Optional[List["GraphModuleNodeRunSetting"]] = None,
        module_node_ui_input_settings: Optional[List["GraphModuleNodeUIInputSetting"]] = None,
        sub_pipelines_info: Optional["SubPipelinesInfo"] = None,
        referenced_node_id: Optional[str] = None,
        pipeline_run_setting_parameters: Optional[List["RunSettingParameter"]] = None,
        pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
        real_time_endpoint_info: Optional["RealTimeEndpointInfo"] = None,
        node_telemetry_meta_infos: Optional[List["NodeTelemetryMetaInfo"]] = None,
        graph_components_mode: Optional[Union[str, "GraphComponentsMode"]] = None,
        module_nodes: Optional[List["GraphModuleNode"]] = None,
        dataset_nodes: Optional[List["GraphDatasetNode"]] = None,
        sub_graph_nodes: Optional[List["GraphReferenceNode"]] = None,
control_reference_nodes: Optional[List["GraphControlReferenceNode"]] = None, control_nodes: Optional[List["GraphControlNode"]] = None, edges: Optional[List["GraphEdge"]] = None, entity_interface: Optional["EntityInterface"] = None, graph_layout: Optional["GraphLayout"] = None, created_by: Optional["CreatedBy"] = None, last_updated_by: Optional["CreatedBy"] = None, default_compute: Optional["ComputeSetting"] = None, default_datastore: Optional["DatastoreSetting"] = None, default_cloud_priority: Optional["CloudPrioritySetting"] = None, extended_properties: Optional[Dict[str, str]] = None, parent_sub_graph_module_ids: Optional[List[str]] = None, id: Optional[str] = None, etag: Optional[str] = None, created_date: Optional[datetime.datetime] = None, last_modified_date: Optional[datetime.datetime] = None, **kwargs ): """ :keyword graph_module_dtos: :paramtype graph_module_dtos: list[~flow.models.ModuleDto] :keyword graph_data_sources: :paramtype graph_data_sources: list[~flow.models.DataInfo] :keyword graphs: This is a dictionary. :paramtype graphs: dict[str, ~flow.models.PipelineGraph] :keyword graph_drafts: This is a dictionary. 
:paramtype graph_drafts: dict[str, ~flow.models.PipelineGraph] :keyword module_node_run_settings: :paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting] :keyword module_node_ui_input_settings: :paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting] :keyword sub_pipelines_info: :paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo :keyword referenced_node_id: :paramtype referenced_node_id: str :keyword pipeline_run_setting_parameters: :paramtype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter] :keyword pipeline_run_settings: :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment] :keyword real_time_endpoint_info: :paramtype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo :keyword node_telemetry_meta_infos: :paramtype node_telemetry_meta_infos: list[~flow.models.NodeTelemetryMetaInfo] :keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin", "ContainsDesignerBuildin". 
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode :keyword module_nodes: :paramtype module_nodes: list[~flow.models.GraphModuleNode] :keyword dataset_nodes: :paramtype dataset_nodes: list[~flow.models.GraphDatasetNode] :keyword sub_graph_nodes: :paramtype sub_graph_nodes: list[~flow.models.GraphReferenceNode] :keyword control_reference_nodes: :paramtype control_reference_nodes: list[~flow.models.GraphControlReferenceNode] :keyword control_nodes: :paramtype control_nodes: list[~flow.models.GraphControlNode] :keyword edges: :paramtype edges: list[~flow.models.GraphEdge] :keyword entity_interface: :paramtype entity_interface: ~flow.models.EntityInterface :keyword graph_layout: :paramtype graph_layout: ~flow.models.GraphLayout :keyword created_by: :paramtype created_by: ~flow.models.CreatedBy :keyword last_updated_by: :paramtype last_updated_by: ~flow.models.CreatedBy :keyword default_compute: :paramtype default_compute: ~flow.models.ComputeSetting :keyword default_datastore: :paramtype default_datastore: ~flow.models.DatastoreSetting :keyword default_cloud_priority: :paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting :keyword extended_properties: This is a dictionary. 
:paramtype extended_properties: dict[str, str] :keyword parent_sub_graph_module_ids: :paramtype parent_sub_graph_module_ids: list[str] :keyword id: :paramtype id: str :keyword etag: :paramtype etag: str :keyword created_date: :paramtype created_date: ~datetime.datetime :keyword last_modified_date: :paramtype last_modified_date: ~datetime.datetime """ super(PipelineGraph, self).__init__(**kwargs) self.graph_module_dtos = graph_module_dtos self.graph_data_sources = graph_data_sources self.graphs = graphs self.graph_drafts = graph_drafts self.module_node_run_settings = module_node_run_settings self.module_node_ui_input_settings = module_node_ui_input_settings self.sub_pipelines_info = sub_pipelines_info self.referenced_node_id = referenced_node_id self.pipeline_run_setting_parameters = pipeline_run_setting_parameters self.pipeline_run_settings = pipeline_run_settings self.real_time_endpoint_info = real_time_endpoint_info self.node_telemetry_meta_infos = node_telemetry_meta_infos self.graph_components_mode = graph_components_mode self.module_nodes = module_nodes self.dataset_nodes = dataset_nodes self.sub_graph_nodes = sub_graph_nodes self.control_reference_nodes = control_reference_nodes self.control_nodes = control_nodes self.edges = edges self.entity_interface = entity_interface self.graph_layout = graph_layout self.created_by = created_by self.last_updated_by = last_updated_by self.default_compute = default_compute self.default_datastore = default_datastore self.default_cloud_priority = default_cloud_priority self.extended_properties = extended_properties self.parent_sub_graph_module_ids = parent_sub_graph_module_ids self.id = id self.etag = etag self.created_date = created_date self.last_modified_date = last_modified_date class PipelineInput(msrest.serialization.Model): """PipelineInput. 
:ivar data: :vartype data: ~flow.models.InputData """ _attribute_map = { 'data': {'key': 'data', 'type': 'InputData'}, } def __init__( self, *, data: Optional["InputData"] = None, **kwargs ): """ :keyword data: :paramtype data: ~flow.models.InputData """ super(PipelineInput, self).__init__(**kwargs) self.data = data class PipelineJob(msrest.serialization.Model): """PipelineJob. :ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML", "Spark", "Base". :vartype job_type: str or ~flow.models.JobType :ivar pipeline_job_type: The only acceptable values to pass in are None and "AzureML". The default value is None. :vartype pipeline_job_type: str :ivar pipeline: :vartype pipeline: ~flow.models.Pipeline :ivar compute_id: :vartype compute_id: str :ivar run_id: :vartype run_id: str :ivar settings: Anything. :vartype settings: any :ivar component_jobs: This is a dictionary. :vartype component_jobs: dict[str, ~flow.models.MfeInternalV20211001ComponentJob] :ivar inputs: This is a dictionary. :vartype inputs: dict[str, ~flow.models.JobInput] :ivar outputs: This is a dictionary. :vartype outputs: dict[str, ~flow.models.JobOutput] :ivar bindings: :vartype bindings: list[~flow.models.Binding] :ivar jobs: This is a dictionary. :vartype jobs: dict[str, any] :ivar input_bindings: This is a dictionary. :vartype input_bindings: dict[str, ~flow.models.InputDataBinding] :ivar output_bindings: This is a dictionary. :vartype output_bindings: dict[str, ~flow.models.OutputDataBinding] :ivar source_job_id: :vartype source_job_id: str :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled", "InProgress". 
:vartype provisioning_state: str or ~flow.models.JobProvisioningState :ivar parent_job_name: :vartype parent_job_name: str :ivar display_name: :vartype display_name: str :ivar experiment_name: :vartype experiment_name: str :ivar status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". :vartype status: str or ~flow.models.JobStatus :ivar interaction_endpoints: Dictionary of :code:`<JobEndpoint>`. :vartype interaction_endpoints: dict[str, ~flow.models.JobEndpoint] :ivar identity: :vartype identity: ~flow.models.MfeInternalIdentityConfiguration :ivar compute: :vartype compute: ~flow.models.ComputeConfiguration :ivar priority: :vartype priority: int :ivar output: :vartype output: ~flow.models.JobOutputArtifacts :ivar is_archived: :vartype is_archived: bool :ivar schedule: :vartype schedule: ~flow.models.ScheduleBase :ivar component_id: :vartype component_id: str :ivar notification_setting: :vartype notification_setting: ~flow.models.NotificationSetting :ivar secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`. :vartype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration] :ivar description: :vartype description: str :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar properties: This is a dictionary. 
:vartype properties: dict[str, str] """ _attribute_map = { 'job_type': {'key': 'jobType', 'type': 'str'}, 'pipeline_job_type': {'key': 'pipelineJobType', 'type': 'str'}, 'pipeline': {'key': 'pipeline', 'type': 'Pipeline'}, 'compute_id': {'key': 'computeId', 'type': 'str'}, 'run_id': {'key': 'runId', 'type': 'str'}, 'settings': {'key': 'settings', 'type': 'object'}, 'component_jobs': {'key': 'componentJobs', 'type': '{MfeInternalV20211001ComponentJob}'}, 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, 'bindings': {'key': 'bindings', 'type': '[Binding]'}, 'jobs': {'key': 'jobs', 'type': '{object}'}, 'input_bindings': {'key': 'inputBindings', 'type': '{InputDataBinding}'}, 'output_bindings': {'key': 'outputBindings', 'type': '{OutputDataBinding}'}, 'source_job_id': {'key': 'sourceJobId', 'type': 'str'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, 'parent_job_name': {'key': 'parentJobName', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'experiment_name': {'key': 'experimentName', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'}, 'identity': {'key': 'identity', 'type': 'MfeInternalIdentityConfiguration'}, 'compute': {'key': 'compute', 'type': 'ComputeConfiguration'}, 'priority': {'key': 'priority', 'type': 'int'}, 'output': {'key': 'output', 'type': 'JobOutputArtifacts'}, 'is_archived': {'key': 'isArchived', 'type': 'bool'}, 'schedule': {'key': 'schedule', 'type': 'ScheduleBase'}, 'component_id': {'key': 'componentId', 'type': 'str'}, 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{MfeInternalSecretConfiguration}'}, 'description': {'key': 'description', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'properties': {'key': 'properties', 'type': '{str}'}, } def 
__init__( self, *, job_type: Optional[Union[str, "JobType"]] = None, pipeline_job_type: Optional[str] = None, pipeline: Optional["Pipeline"] = None, compute_id: Optional[str] = None, run_id: Optional[str] = None, settings: Optional[Any] = None, component_jobs: Optional[Dict[str, "MfeInternalV20211001ComponentJob"]] = None, inputs: Optional[Dict[str, "JobInput"]] = None, outputs: Optional[Dict[str, "JobOutput"]] = None, bindings: Optional[List["Binding"]] = None, jobs: Optional[Dict[str, Any]] = None, input_bindings: Optional[Dict[str, "InputDataBinding"]] = None, output_bindings: Optional[Dict[str, "OutputDataBinding"]] = None, source_job_id: Optional[str] = None, provisioning_state: Optional[Union[str, "JobProvisioningState"]] = None, parent_job_name: Optional[str] = None, display_name: Optional[str] = None, experiment_name: Optional[str] = None, status: Optional[Union[str, "JobStatus"]] = None, interaction_endpoints: Optional[Dict[str, "JobEndpoint"]] = None, identity: Optional["MfeInternalIdentityConfiguration"] = None, compute: Optional["ComputeConfiguration"] = None, priority: Optional[int] = None, output: Optional["JobOutputArtifacts"] = None, is_archived: Optional[bool] = None, schedule: Optional["ScheduleBase"] = None, component_id: Optional[str] = None, notification_setting: Optional["NotificationSetting"] = None, secrets_configuration: Optional[Dict[str, "MfeInternalSecretConfiguration"]] = None, description: Optional[str] = None, tags: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, **kwargs ): """ :keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML", "Spark", "Base". :paramtype job_type: str or ~flow.models.JobType :keyword pipeline_job_type: The only acceptable values to pass in are None and "AzureML". The default value is None. 
:paramtype pipeline_job_type: str :keyword pipeline: :paramtype pipeline: ~flow.models.Pipeline :keyword compute_id: :paramtype compute_id: str :keyword run_id: :paramtype run_id: str :keyword settings: Anything. :paramtype settings: any :keyword component_jobs: This is a dictionary. :paramtype component_jobs: dict[str, ~flow.models.MfeInternalV20211001ComponentJob] :keyword inputs: This is a dictionary. :paramtype inputs: dict[str, ~flow.models.JobInput] :keyword outputs: This is a dictionary. :paramtype outputs: dict[str, ~flow.models.JobOutput] :keyword bindings: :paramtype bindings: list[~flow.models.Binding] :keyword jobs: This is a dictionary. :paramtype jobs: dict[str, any] :keyword input_bindings: This is a dictionary. :paramtype input_bindings: dict[str, ~flow.models.InputDataBinding] :keyword output_bindings: This is a dictionary. :paramtype output_bindings: dict[str, ~flow.models.OutputDataBinding] :keyword source_job_id: :paramtype source_job_id: str :keyword provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled", "InProgress". :paramtype provisioning_state: str or ~flow.models.JobProvisioningState :keyword parent_job_name: :paramtype parent_job_name: str :keyword display_name: :paramtype display_name: str :keyword experiment_name: :paramtype experiment_name: str :keyword status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". :paramtype status: str or ~flow.models.JobStatus :keyword interaction_endpoints: Dictionary of :code:`<JobEndpoint>`. 
:paramtype interaction_endpoints: dict[str, ~flow.models.JobEndpoint] :keyword identity: :paramtype identity: ~flow.models.MfeInternalIdentityConfiguration :keyword compute: :paramtype compute: ~flow.models.ComputeConfiguration :keyword priority: :paramtype priority: int :keyword output: :paramtype output: ~flow.models.JobOutputArtifacts :keyword is_archived: :paramtype is_archived: bool :keyword schedule: :paramtype schedule: ~flow.models.ScheduleBase :keyword component_id: :paramtype component_id: str :keyword notification_setting: :paramtype notification_setting: ~flow.models.NotificationSetting :keyword secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`. :paramtype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration] :keyword description: :paramtype description: str :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] """ super(PipelineJob, self).__init__(**kwargs) self.job_type = job_type self.pipeline_job_type = pipeline_job_type self.pipeline = pipeline self.compute_id = compute_id self.run_id = run_id self.settings = settings self.component_jobs = component_jobs self.inputs = inputs self.outputs = outputs self.bindings = bindings self.jobs = jobs self.input_bindings = input_bindings self.output_bindings = output_bindings self.source_job_id = source_job_id self.provisioning_state = provisioning_state self.parent_job_name = parent_job_name self.display_name = display_name self.experiment_name = experiment_name self.status = status self.interaction_endpoints = interaction_endpoints self.identity = identity self.compute = compute self.priority = priority self.output = output self.is_archived = is_archived self.schedule = schedule self.component_id = component_id self.notification_setting = notification_setting self.secrets_configuration = secrets_configuration self.description = description self.tags 
= tags self.properties = properties class PipelineJobRuntimeBasicSettings(msrest.serialization.Model): """PipelineJobRuntimeBasicSettings. :ivar pipeline_run_settings: :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment] :ivar experiment_name: :vartype experiment_name: str :ivar pipeline_job_name: :vartype pipeline_job_name: str :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar display_name: :vartype display_name: str :ivar description: :vartype description: str :ivar trigger_time_string: :vartype trigger_time_string: str :ivar pipeline_parameters: This is a dictionary. :vartype pipeline_parameters: dict[str, str] :ivar data_path_assignments: This is a dictionary. :vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath] :ivar data_set_definition_value_assignments: This is a dictionary. :vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue] :ivar asset_output_settings_assignments: This is a dictionary. 
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings] """ _attribute_map = { 'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'}, 'experiment_name': {'key': 'experimentName', 'type': 'str'}, 'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'trigger_time_string': {'key': 'triggerTimeString', 'type': 'str'}, 'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'}, 'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'}, 'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'}, 'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'}, } def __init__( self, *, pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None, experiment_name: Optional[str] = None, pipeline_job_name: Optional[str] = None, tags: Optional[Dict[str, str]] = None, display_name: Optional[str] = None, description: Optional[str] = None, trigger_time_string: Optional[str] = None, pipeline_parameters: Optional[Dict[str, str]] = None, data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None, data_set_definition_value_assignments: Optional[Dict[str, "DataSetDefinitionValue"]] = None, asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None, **kwargs ): """ :keyword pipeline_run_settings: :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment] :keyword experiment_name: :paramtype experiment_name: str :keyword pipeline_job_name: :paramtype pipeline_job_name: str :keyword tags: A set of tags. This is a dictionary. 
:paramtype tags: dict[str, str] :keyword display_name: :paramtype display_name: str :keyword description: :paramtype description: str :keyword trigger_time_string: :paramtype trigger_time_string: str :keyword pipeline_parameters: This is a dictionary. :paramtype pipeline_parameters: dict[str, str] :keyword data_path_assignments: This is a dictionary. :paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath] :keyword data_set_definition_value_assignments: This is a dictionary. :paramtype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue] :keyword asset_output_settings_assignments: This is a dictionary. :paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings] """ super(PipelineJobRuntimeBasicSettings, self).__init__(**kwargs) self.pipeline_run_settings = pipeline_run_settings self.experiment_name = experiment_name self.pipeline_job_name = pipeline_job_name self.tags = tags self.display_name = display_name self.description = description self.trigger_time_string = trigger_time_string self.pipeline_parameters = pipeline_parameters self.data_path_assignments = data_path_assignments self.data_set_definition_value_assignments = data_set_definition_value_assignments self.asset_output_settings_assignments = asset_output_settings_assignments class PipelineJobScheduleDto(msrest.serialization.Model): """PipelineJobScheduleDto. :ivar system_data: :vartype system_data: ~flow.models.SystemData :ivar name: :vartype name: str :ivar pipeline_job_name: :vartype pipeline_job_name: str :ivar pipeline_job_runtime_settings: :vartype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings :ivar display_name: :vartype display_name: str :ivar trigger_type: Possible values include: "Recurrence", "Cron". 
:vartype trigger_type: str or ~flow.models.TriggerType :ivar recurrence: :vartype recurrence: ~flow.models.Recurrence :ivar cron: :vartype cron: ~flow.models.Cron :ivar status: Possible values include: "Enabled", "Disabled". :vartype status: str or ~flow.models.ScheduleStatus :ivar description: :vartype description: str :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar properties: This is a dictionary. :vartype properties: dict[str, str] """ _attribute_map = { 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'name': {'key': 'name', 'type': 'str'}, 'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'}, 'pipeline_job_runtime_settings': {'key': 'pipelineJobRuntimeSettings', 'type': 'PipelineJobRuntimeBasicSettings'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'trigger_type': {'key': 'triggerType', 'type': 'str'}, 'recurrence': {'key': 'recurrence', 'type': 'Recurrence'}, 'cron': {'key': 'cron', 'type': 'Cron'}, 'status': {'key': 'status', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'properties': {'key': 'properties', 'type': '{str}'}, } def __init__( self, *, system_data: Optional["SystemData"] = None, name: Optional[str] = None, pipeline_job_name: Optional[str] = None, pipeline_job_runtime_settings: Optional["PipelineJobRuntimeBasicSettings"] = None, display_name: Optional[str] = None, trigger_type: Optional[Union[str, "TriggerType"]] = None, recurrence: Optional["Recurrence"] = None, cron: Optional["Cron"] = None, status: Optional[Union[str, "ScheduleStatus"]] = None, description: Optional[str] = None, tags: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, **kwargs ): """ :keyword system_data: :paramtype system_data: ~flow.models.SystemData :keyword name: :paramtype name: str :keyword pipeline_job_name: :paramtype pipeline_job_name: str :keyword pipeline_job_runtime_settings: :paramtype 
pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings :keyword display_name: :paramtype display_name: str :keyword trigger_type: Possible values include: "Recurrence", "Cron". :paramtype trigger_type: str or ~flow.models.TriggerType :keyword recurrence: :paramtype recurrence: ~flow.models.Recurrence :keyword cron: :paramtype cron: ~flow.models.Cron :keyword status: Possible values include: "Enabled", "Disabled". :paramtype status: str or ~flow.models.ScheduleStatus :keyword description: :paramtype description: str :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] """ super(PipelineJobScheduleDto, self).__init__(**kwargs) self.system_data = system_data self.name = name self.pipeline_job_name = pipeline_job_name self.pipeline_job_runtime_settings = pipeline_job_runtime_settings self.display_name = display_name self.trigger_type = trigger_type self.recurrence = recurrence self.cron = cron self.status = status self.description = description self.tags = tags self.properties = properties class PipelineOutput(msrest.serialization.Model): """PipelineOutput. :ivar data: :vartype data: ~flow.models.MfeInternalOutputData """ _attribute_map = { 'data': {'key': 'data', 'type': 'MfeInternalOutputData'}, } def __init__( self, *, data: Optional["MfeInternalOutputData"] = None, **kwargs ): """ :keyword data: :paramtype data: ~flow.models.MfeInternalOutputData """ super(PipelineOutput, self).__init__(**kwargs) self.data = data class PipelineRun(msrest.serialization.Model): """PipelineRun. :ivar pipeline_id: :vartype pipeline_id: str :ivar run_source: :vartype run_source: str :ivar run_type: Possible values include: "HTTP", "SDK", "Schedule", "Portal". :vartype run_type: str or ~flow.models.RunType :ivar parameters: This is a dictionary. :vartype parameters: dict[str, str] :ivar data_path_assignments: This is a dictionary. 
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath] :ivar data_set_definition_value_assignment: This is a dictionary. :vartype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue] :ivar asset_output_settings_assignments: This is a dictionary. :vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings] :ivar total_steps: :vartype total_steps: int :ivar logs: This is a dictionary. :vartype logs: dict[str, str] :ivar user_alias: :vartype user_alias: str :ivar enforce_rerun: :vartype enforce_rerun: bool :ivar continue_run_on_failed_optional_input: :vartype continue_run_on_failed_optional_input: bool :ivar default_compute: :vartype default_compute: ~flow.models.ComputeSetting :ivar default_datastore: :vartype default_datastore: ~flow.models.DatastoreSetting :ivar default_cloud_priority: :vartype default_cloud_priority: ~flow.models.CloudPrioritySetting :ivar pipeline_timeout_seconds: :vartype pipeline_timeout_seconds: int :ivar continue_run_on_step_failure: :vartype continue_run_on_step_failure: bool :ivar identity_config: :vartype identity_config: ~flow.models.IdentitySetting :ivar description: :vartype description: str :ivar display_name: :vartype display_name: str :ivar run_number: :vartype run_number: int :ivar status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running", "Failed", "Finished", "Canceled", "Throttled", "Unknown". :vartype status_code: str or ~flow.models.PipelineStatusCode :ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused", "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled". 
:vartype run_status: str or ~flow.models.RunStatus :ivar status_detail: :vartype status_detail: str :ivar start_time: :vartype start_time: ~datetime.datetime :ivar end_time: :vartype end_time: ~datetime.datetime :ivar graph_id: :vartype graph_id: str :ivar experiment_id: :vartype experiment_id: str :ivar experiment_name: :vartype experiment_name: str :ivar is_experiment_archived: :vartype is_experiment_archived: bool :ivar submitted_by: :vartype submitted_by: str :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar step_tags: This is a dictionary. :vartype step_tags: dict[str, str] :ivar properties: This is a dictionary. :vartype properties: dict[str, str] :ivar aether_start_time: :vartype aether_start_time: ~datetime.datetime :ivar aether_end_time: :vartype aether_end_time: ~datetime.datetime :ivar run_history_start_time: :vartype run_history_start_time: ~datetime.datetime :ivar run_history_end_time: :vartype run_history_end_time: ~datetime.datetime :ivar unique_child_run_compute_targets: :vartype unique_child_run_compute_targets: list[str] :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled". 
:vartype entity_status: str or ~flow.models.EntityStatus :ivar id: :vartype id: str :ivar etag: :vartype etag: str :ivar created_date: :vartype created_date: ~datetime.datetime :ivar last_modified_date: :vartype last_modified_date: ~datetime.datetime """ _validation = { 'unique_child_run_compute_targets': {'unique': True}, } _attribute_map = { 'pipeline_id': {'key': 'pipelineId', 'type': 'str'}, 'run_source': {'key': 'runSource', 'type': 'str'}, 'run_type': {'key': 'runType', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{str}'}, 'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'}, 'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': '{DataSetDefinitionValue}'}, 'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'}, 'total_steps': {'key': 'totalSteps', 'type': 'int'}, 'logs': {'key': 'logs', 'type': '{str}'}, 'user_alias': {'key': 'userAlias', 'type': 'str'}, 'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'}, 'continue_run_on_failed_optional_input': {'key': 'continueRunOnFailedOptionalInput', 'type': 'bool'}, 'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'}, 'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'}, 'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'}, 'pipeline_timeout_seconds': {'key': 'pipelineTimeoutSeconds', 'type': 'int'}, 'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'}, 'identity_config': {'key': 'identityConfig', 'type': 'IdentitySetting'}, 'description': {'key': 'description', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'run_number': {'key': 'runNumber', 'type': 'int'}, 'status_code': {'key': 'statusCode', 'type': 'str'}, 'run_status': {'key': 'runStatus', 'type': 'str'}, 'status_detail': {'key': 'statusDetail', 'type': 'str'}, 'start_time': {'key': 
'startTime', 'type': 'iso-8601'}, 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, 'graph_id': {'key': 'graphId', 'type': 'str'}, 'experiment_id': {'key': 'experimentId', 'type': 'str'}, 'experiment_name': {'key': 'experimentName', 'type': 'str'}, 'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'}, 'submitted_by': {'key': 'submittedBy', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'step_tags': {'key': 'stepTags', 'type': '{str}'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'aether_start_time': {'key': 'aetherStartTime', 'type': 'iso-8601'}, 'aether_end_time': {'key': 'aetherEndTime', 'type': 'iso-8601'}, 'run_history_start_time': {'key': 'runHistoryStartTime', 'type': 'iso-8601'}, 'run_history_end_time': {'key': 'runHistoryEndTime', 'type': 'iso-8601'}, 'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'}, 'entity_status': {'key': 'entityStatus', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'created_date': {'key': 'createdDate', 'type': 'iso-8601'}, 'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'}, } def __init__( self, *, pipeline_id: Optional[str] = None, run_source: Optional[str] = None, run_type: Optional[Union[str, "RunType"]] = None, parameters: Optional[Dict[str, str]] = None, data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None, data_set_definition_value_assignment: Optional[Dict[str, "DataSetDefinitionValue"]] = None, asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None, total_steps: Optional[int] = None, logs: Optional[Dict[str, str]] = None, user_alias: Optional[str] = None, enforce_rerun: Optional[bool] = None, continue_run_on_failed_optional_input: Optional[bool] = None, default_compute: Optional["ComputeSetting"] = None, default_datastore: Optional["DatastoreSetting"] = None, default_cloud_priority: Optional["CloudPrioritySetting"] = None, 
pipeline_timeout_seconds: Optional[int] = None, continue_run_on_step_failure: Optional[bool] = None, identity_config: Optional["IdentitySetting"] = None, description: Optional[str] = None, display_name: Optional[str] = None, run_number: Optional[int] = None, status_code: Optional[Union[str, "PipelineStatusCode"]] = None, run_status: Optional[Union[str, "RunStatus"]] = None, status_detail: Optional[str] = None, start_time: Optional[datetime.datetime] = None, end_time: Optional[datetime.datetime] = None, graph_id: Optional[str] = None, experiment_id: Optional[str] = None, experiment_name: Optional[str] = None, is_experiment_archived: Optional[bool] = None, submitted_by: Optional[str] = None, tags: Optional[Dict[str, str]] = None, step_tags: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, aether_start_time: Optional[datetime.datetime] = None, aether_end_time: Optional[datetime.datetime] = None, run_history_start_time: Optional[datetime.datetime] = None, run_history_end_time: Optional[datetime.datetime] = None, unique_child_run_compute_targets: Optional[List[str]] = None, entity_status: Optional[Union[str, "EntityStatus"]] = None, id: Optional[str] = None, etag: Optional[str] = None, created_date: Optional[datetime.datetime] = None, last_modified_date: Optional[datetime.datetime] = None, **kwargs ): """ :keyword pipeline_id: :paramtype pipeline_id: str :keyword run_source: :paramtype run_source: str :keyword run_type: Possible values include: "HTTP", "SDK", "Schedule", "Portal". :paramtype run_type: str or ~flow.models.RunType :keyword parameters: This is a dictionary. :paramtype parameters: dict[str, str] :keyword data_path_assignments: This is a dictionary. :paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath] :keyword data_set_definition_value_assignment: This is a dictionary. 
:paramtype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue] :keyword asset_output_settings_assignments: This is a dictionary. :paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings] :keyword total_steps: :paramtype total_steps: int :keyword logs: This is a dictionary. :paramtype logs: dict[str, str] :keyword user_alias: :paramtype user_alias: str :keyword enforce_rerun: :paramtype enforce_rerun: bool :keyword continue_run_on_failed_optional_input: :paramtype continue_run_on_failed_optional_input: bool :keyword default_compute: :paramtype default_compute: ~flow.models.ComputeSetting :keyword default_datastore: :paramtype default_datastore: ~flow.models.DatastoreSetting :keyword default_cloud_priority: :paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting :keyword pipeline_timeout_seconds: :paramtype pipeline_timeout_seconds: int :keyword continue_run_on_step_failure: :paramtype continue_run_on_step_failure: bool :keyword identity_config: :paramtype identity_config: ~flow.models.IdentitySetting :keyword description: :paramtype description: str :keyword display_name: :paramtype display_name: str :keyword run_number: :paramtype run_number: int :keyword status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running", "Failed", "Finished", "Canceled", "Throttled", "Unknown". :paramtype status_code: str or ~flow.models.PipelineStatusCode :keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused", "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled". 
:paramtype run_status: str or ~flow.models.RunStatus :keyword status_detail: :paramtype status_detail: str :keyword start_time: :paramtype start_time: ~datetime.datetime :keyword end_time: :paramtype end_time: ~datetime.datetime :keyword graph_id: :paramtype graph_id: str :keyword experiment_id: :paramtype experiment_id: str :keyword experiment_name: :paramtype experiment_name: str :keyword is_experiment_archived: :paramtype is_experiment_archived: bool :keyword submitted_by: :paramtype submitted_by: str :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword step_tags: This is a dictionary. :paramtype step_tags: dict[str, str] :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] :keyword aether_start_time: :paramtype aether_start_time: ~datetime.datetime :keyword aether_end_time: :paramtype aether_end_time: ~datetime.datetime :keyword run_history_start_time: :paramtype run_history_start_time: ~datetime.datetime :keyword run_history_end_time: :paramtype run_history_end_time: ~datetime.datetime :keyword unique_child_run_compute_targets: :paramtype unique_child_run_compute_targets: list[str] :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled". 
:paramtype entity_status: str or ~flow.models.EntityStatus :keyword id: :paramtype id: str :keyword etag: :paramtype etag: str :keyword created_date: :paramtype created_date: ~datetime.datetime :keyword last_modified_date: :paramtype last_modified_date: ~datetime.datetime """ super(PipelineRun, self).__init__(**kwargs) self.pipeline_id = pipeline_id self.run_source = run_source self.run_type = run_type self.parameters = parameters self.data_path_assignments = data_path_assignments self.data_set_definition_value_assignment = data_set_definition_value_assignment self.asset_output_settings_assignments = asset_output_settings_assignments self.total_steps = total_steps self.logs = logs self.user_alias = user_alias self.enforce_rerun = enforce_rerun self.continue_run_on_failed_optional_input = continue_run_on_failed_optional_input self.default_compute = default_compute self.default_datastore = default_datastore self.default_cloud_priority = default_cloud_priority self.pipeline_timeout_seconds = pipeline_timeout_seconds self.continue_run_on_step_failure = continue_run_on_step_failure self.identity_config = identity_config self.description = description self.display_name = display_name self.run_number = run_number self.status_code = status_code self.run_status = run_status self.status_detail = status_detail self.start_time = start_time self.end_time = end_time self.graph_id = graph_id self.experiment_id = experiment_id self.experiment_name = experiment_name self.is_experiment_archived = is_experiment_archived self.submitted_by = submitted_by self.tags = tags self.step_tags = step_tags self.properties = properties self.aether_start_time = aether_start_time self.aether_end_time = aether_end_time self.run_history_start_time = run_history_start_time self.run_history_end_time = run_history_end_time self.unique_child_run_compute_targets = unique_child_run_compute_targets self.entity_status = entity_status self.id = id self.etag = etag self.created_date = created_date 
class PipelineRunGraphDetail(msrest.serialization.Model):
    """Pairs a pipeline run's graph with the status of each of its nodes.

    :ivar graph: The run's pipeline graph.
    :vartype graph: ~flow.models.PipelineGraph
    :ivar graph_nodes_status: Per-node status info. This is a dictionary.
    :vartype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
    """

    # Wire-name / serializer-type mapping consumed by msrest.
    _attribute_map = {
        'graph': {'key': 'graph', 'type': 'PipelineGraph'},
        'graph_nodes_status': {'key': 'graphNodesStatus', 'type': '{GraphNodeStatusInfo}'},
    }

    def __init__(
        self,
        *,
        graph: Optional["PipelineGraph"] = None,
        graph_nodes_status: Optional[Dict[str, "GraphNodeStatusInfo"]] = None,
        **kwargs
    ):
        """
        :keyword graph: The run's pipeline graph.
        :paramtype graph: ~flow.models.PipelineGraph
        :keyword graph_nodes_status: Per-node status info. This is a dictionary.
        :paramtype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
        """
        super().__init__(**kwargs)
        self.graph = graph
        self.graph_nodes_status = graph_nodes_status
class PipelineRunGraphStatus(msrest.serialization.Model):
    """Overall pipeline status together with per-node status for one run.

    :ivar status: Aggregate status of the pipeline run.
    :vartype status: ~flow.models.PipelineStatus
    :ivar graph_nodes_status: Per-node status info. This is a dictionary.
    :vartype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
    :ivar experiment_id: Identifier of the owning experiment.
    :vartype experiment_id: str
    :ivar is_experiment_archived: Whether the owning experiment is archived.
    :vartype is_experiment_archived: bool
    """

    # Wire-name / serializer-type mapping consumed by msrest.
    _attribute_map = {
        'status': {'key': 'status', 'type': 'PipelineStatus'},
        'graph_nodes_status': {'key': 'graphNodesStatus', 'type': '{GraphNodeStatusInfo}'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        status: Optional["PipelineStatus"] = None,
        graph_nodes_status: Optional[Dict[str, "GraphNodeStatusInfo"]] = None,
        experiment_id: Optional[str] = None,
        is_experiment_archived: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword status: Aggregate status of the pipeline run.
        :paramtype status: ~flow.models.PipelineStatus
        :keyword graph_nodes_status: Per-node status info. This is a dictionary.
        :paramtype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
        :keyword experiment_id: Identifier of the owning experiment.
        :paramtype experiment_id: str
        :keyword is_experiment_archived: Whether the owning experiment is archived.
        :paramtype is_experiment_archived: bool
        """
        super().__init__(**kwargs)
        self.status = status
        self.graph_nodes_status = graph_nodes_status
        self.experiment_id = experiment_id
        self.is_experiment_archived = is_experiment_archived
class PipelineRunProfile(msrest.serialization.Model):
    """Profiling summary of a pipeline run, recursing into steps and sub-pipelines.

    Timestamps (``create_time``/``start_time``/``end_time``/``profiling_time``)
    are serialized as ``long`` integers — epoch-based, presumably; confirm the
    unit against the service contract.

    :ivar run_id: :vartype run_id: str
    :ivar node_id: :vartype node_id: str
    :ivar run_url: :vartype run_url: str
    :ivar experiment_name: :vartype experiment_name: str
    :ivar experiment_id: :vartype experiment_id: str
    :ivar description: :vartype description: str
    :ivar status: :vartype status: ~flow.models.PipelineRunStatus
    :ivar create_time: :vartype create_time: long
    :ivar start_time: :vartype start_time: long
    :ivar end_time: :vartype end_time: long
    :ivar profiling_time: :vartype profiling_time: long
    :ivar step_runs_profile: :vartype step_runs_profile: list[~flow.models.StepRunProfile]
    :ivar sub_pipeline_run_profile: nested profiles of child pipeline runs.
    :vartype sub_pipeline_run_profile: list[~flow.models.PipelineRunProfile]
    """

    # Wire-name / serializer-type mapping consumed by msrest.
    _attribute_map = {
        'run_id': {'key': 'runId', 'type': 'str'},
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'run_url': {'key': 'runUrl', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'status': {'key': 'status', 'type': 'PipelineRunStatus'},
        'create_time': {'key': 'createTime', 'type': 'long'},
        'start_time': {'key': 'startTime', 'type': 'long'},
        'end_time': {'key': 'endTime', 'type': 'long'},
        'profiling_time': {'key': 'profilingTime', 'type': 'long'},
        'step_runs_profile': {'key': 'stepRunsProfile', 'type': '[StepRunProfile]'},
        'sub_pipeline_run_profile': {'key': 'subPipelineRunProfile', 'type': '[PipelineRunProfile]'},
    }

    def __init__(
        self,
        *,
        run_id: Optional[str] = None,
        node_id: Optional[str] = None,
        run_url: Optional[str] = None,
        experiment_name: Optional[str] = None,
        experiment_id: Optional[str] = None,
        description: Optional[str] = None,
        status: Optional["PipelineRunStatus"] = None,
        create_time: Optional[int] = None,
        start_time: Optional[int] = None,
        end_time: Optional[int] = None,
        profiling_time: Optional[int] = None,
        step_runs_profile: Optional[List["StepRunProfile"]] = None,
        sub_pipeline_run_profile: Optional[List["PipelineRunProfile"]] = None,
        **kwargs
    ):
        """All keywords mirror the class-level ``:ivar:`` docs; every one is optional."""
        super().__init__(**kwargs)
        self.run_id = run_id
        self.node_id = node_id
        self.run_url = run_url
        self.experiment_name = experiment_name
        self.experiment_id = experiment_id
        self.description = description
        self.status = status
        self.create_time = create_time
        self.start_time = start_time
        self.end_time = end_time
        self.profiling_time = profiling_time
        self.step_runs_profile = step_runs_profile
        self.sub_pipeline_run_profile = sub_pipeline_run_profile
class PipelineRunStatus(msrest.serialization.Model):
    """Status snapshot of a pipeline run.

    :ivar status_code: Possible values include: "NotStarted", "Running", "Failed",
     "Finished", "Canceled", "Queued", "CancelRequested".
    :vartype status_code: str or ~flow.models.PipelineRunStatusCode
    :ivar status_detail: Free-form detail accompanying the status code.
    :vartype status_detail: str
    :ivar creation_time: :vartype creation_time: ~datetime.datetime
    :ivar end_time: :vartype end_time: ~datetime.datetime
    """

    # Wire-name / serializer-type mapping consumed by msrest.
    _attribute_map = {
        'status_code': {'key': 'statusCode', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
        'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        status_code: Optional[Union[str, "PipelineRunStatusCode"]] = None,
        status_detail: Optional[str] = None,
        creation_time: Optional[datetime.datetime] = None,
        end_time: Optional[datetime.datetime] = None,
        **kwargs
    ):
        """
        :keyword status_code: Possible values include: "NotStarted", "Running",
         "Failed", "Finished", "Canceled", "Queued", "CancelRequested".
        :paramtype status_code: str or ~flow.models.PipelineRunStatusCode
        :keyword status_detail: Free-form detail accompanying the status code.
        :paramtype status_detail: str
        :keyword creation_time: :paramtype creation_time: ~datetime.datetime
        :keyword end_time: :paramtype end_time: ~datetime.datetime
        """
        super().__init__(**kwargs)
        self.status_code = status_code
        self.status_detail = status_detail
        self.creation_time = creation_time
        self.end_time = end_time
class PipelineRunStepDetails(msrest.serialization.Model):
    """Detailed record of a single step run inside a pipeline run.

    :ivar run_id: :vartype run_id: str
    :ivar target: :vartype target: str
    :ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing",
     "Paused", "Starting", "Preparing", "Queued", "Running", "Finalizing",
     "CancelRequested", "Completed", "Failed", "Canceled".
    :vartype status: str or ~flow.models.RunStatus
    :ivar status_detail: :vartype status_detail: str
    :ivar parent_run_id: :vartype parent_run_id: str
    :ivar start_time: :vartype start_time: ~datetime.datetime
    :ivar end_time: :vartype end_time: ~datetime.datetime
    :ivar is_reused: whether the step result was reused from a previous run.
    :vartype is_reused: bool
    :ivar logs: This is a dictionary. :vartype logs: dict[str, str]
    :ivar outputs: This is a dictionary. :vartype outputs: dict[str, str]
    :ivar snapshot_info: :vartype snapshot_info: ~flow.models.SnapshotInfo
    :ivar input_datasets: :vartype input_datasets: list[~flow.models.DatasetLineage]
    :ivar output_datasets: :vartype output_datasets: list[~flow.models.OutputDatasetLineage]
    """

    # Serializer enforces uniqueness of dataset lineage entries.
    _validation = {
        'input_datasets': {'unique': True},
        'output_datasets': {'unique': True},
    }

    # Wire-name / serializer-type mapping consumed by msrest.
    _attribute_map = {
        'run_id': {'key': 'runId', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'is_reused': {'key': 'isReused', 'type': 'bool'},
        'logs': {'key': 'logs', 'type': '{str}'},
        'outputs': {'key': 'outputs', 'type': '{str}'},
        'snapshot_info': {'key': 'snapshotInfo', 'type': 'SnapshotInfo'},
        'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
        'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
    }

    def __init__(
        self,
        *,
        run_id: Optional[str] = None,
        target: Optional[str] = None,
        status: Optional[Union[str, "RunStatus"]] = None,
        status_detail: Optional[str] = None,
        parent_run_id: Optional[str] = None,
        start_time: Optional[datetime.datetime] = None,
        end_time: Optional[datetime.datetime] = None,
        is_reused: Optional[bool] = None,
        logs: Optional[Dict[str, str]] = None,
        outputs: Optional[Dict[str, str]] = None,
        snapshot_info: Optional["SnapshotInfo"] = None,
        input_datasets: Optional[List["DatasetLineage"]] = None,
        output_datasets: Optional[List["OutputDatasetLineage"]] = None,
        **kwargs
    ):
        """All keywords mirror the class-level ``:ivar:`` docs; every one is optional."""
        super().__init__(**kwargs)
        self.run_id = run_id
        self.target = target
        self.status = status
        self.status_detail = status_detail
        self.parent_run_id = parent_run_id
        self.start_time = start_time
        self.end_time = end_time
        self.is_reused = is_reused
        self.logs = logs
        self.outputs = outputs
        self.snapshot_info = snapshot_info
        self.input_datasets = input_datasets
        self.output_datasets = output_datasets
class PipelineRunSummary(msrest.serialization.Model):
    """Summary view of a pipeline run (same field set as ``PipelineRun`` minus submission details).

    Enum-like fields:

    * ``status_code`` — "NotStarted", "InDraft", "Preparing", "Running", "Failed",
      "Finished", "Canceled", "Throttled", "Unknown"
      (:class:`~flow.models.PipelineStatusCode` or str).
    * ``run_status`` — "NotStarted", "Unapproved", "Pausing", "Paused", "Starting",
      "Preparing", "Queued", "Running", "Finalizing", "CancelRequested",
      "Completed", "Failed", "Canceled" (:class:`~flow.models.RunStatus` or str).
    * ``entity_status`` — "Active", "Deprecated", "Disabled"
      (:class:`~flow.models.EntityStatus` or str).

    ``tags``, ``step_tags`` and ``properties`` are ``dict[str, str]``;
    all ``*_time``/``*_date`` fields are :class:`~datetime.datetime`;
    ``unique_child_run_compute_targets`` is a ``list[str]`` whose entries the
    serializer requires to be unique.
    """

    # Serializer enforces uniqueness of the compute-target list.
    _validation = {
        'unique_child_run_compute_targets': {'unique': True},
    }

    # Wire-name / serializer-type mapping consumed by msrest.
    _attribute_map = {
        'description': {'key': 'description', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'run_number': {'key': 'runNumber', 'type': 'int'},
        'status_code': {'key': 'statusCode', 'type': 'str'},
        'run_status': {'key': 'runStatus', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'graph_id': {'key': 'graphId', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
        'submitted_by': {'key': 'submittedBy', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'step_tags': {'key': 'stepTags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'aether_start_time': {'key': 'aetherStartTime', 'type': 'iso-8601'},
        'aether_end_time': {'key': 'aetherEndTime', 'type': 'iso-8601'},
        'run_history_start_time': {'key': 'runHistoryStartTime', 'type': 'iso-8601'},
        'run_history_end_time': {'key': 'runHistoryEndTime', 'type': 'iso-8601'},
        'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        description: Optional[str] = None,
        display_name: Optional[str] = None,
        run_number: Optional[int] = None,
        status_code: Optional[Union[str, "PipelineStatusCode"]] = None,
        run_status: Optional[Union[str, "RunStatus"]] = None,
        status_detail: Optional[str] = None,
        start_time: Optional[datetime.datetime] = None,
        end_time: Optional[datetime.datetime] = None,
        graph_id: Optional[str] = None,
        experiment_id: Optional[str] = None,
        experiment_name: Optional[str] = None,
        is_experiment_archived: Optional[bool] = None,
        submitted_by: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        step_tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        aether_start_time: Optional[datetime.datetime] = None,
        aether_end_time: Optional[datetime.datetime] = None,
        run_history_start_time: Optional[datetime.datetime] = None,
        run_history_end_time: Optional[datetime.datetime] = None,
        unique_child_run_compute_targets: Optional[List[str]] = None,
        entity_status: Optional[Union[str, "EntityStatus"]] = None,
        id: Optional[str] = None,
        etag: Optional[str] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        **kwargs
    ):
        """All keywords mirror the class docstring's field list; every one is optional."""
        super().__init__(**kwargs)
        self.description = description
        self.display_name = display_name
        self.run_number = run_number
        self.status_code = status_code
        self.run_status = run_status
        self.status_detail = status_detail
        self.start_time = start_time
        self.end_time = end_time
        self.graph_id = graph_id
        self.experiment_id = experiment_id
        self.experiment_name = experiment_name
        self.is_experiment_archived = is_experiment_archived
        self.submitted_by = submitted_by
        self.tags = tags
        self.step_tags = step_tags
        self.properties = properties
        self.aether_start_time = aether_start_time
        self.aether_end_time = aether_end_time
        self.run_history_start_time = run_history_start_time
        self.run_history_end_time = run_history_end_time
        self.unique_child_run_compute_targets = unique_child_run_compute_targets
        self.entity_status = entity_status
        self.id = id
        self.etag = etag
        self.created_date = created_date
        self.last_modified_date = last_modified_date
class PipelineStatus(msrest.serialization.Model):
    """Aggregate status of a pipeline.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar status_code: Possible values include: "NotStarted", "InDraft",
     "Preparing", "Running", "Failed", "Finished", "Canceled", "Throttled",
     "Unknown".
    :vartype status_code: str or ~flow.models.PipelineStatusCode
    :ivar run_status: Possible values include: "NotStarted", "Unapproved",
     "Pausing", "Paused", "Starting", "Preparing", "Queued", "Running",
     "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled".
    :vartype run_status: str or ~flow.models.RunStatus
    :ivar status_detail: :vartype status_detail: str
    :ivar start_time: :vartype start_time: ~datetime.datetime
    :ivar end_time: :vartype end_time: ~datetime.datetime
    :ivar is_terminal_state: server-populated; read-only.
    :vartype is_terminal_state: bool
    """

    # is_terminal_state is computed server-side, never accepted on input.
    _validation = {
        'is_terminal_state': {'readonly': True},
    }

    # Wire-name / serializer-type mapping consumed by msrest.
    _attribute_map = {
        'status_code': {'key': 'statusCode', 'type': 'str'},
        'run_status': {'key': 'runStatus', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'is_terminal_state': {'key': 'isTerminalState', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        status_code: Optional[Union[str, "PipelineStatusCode"]] = None,
        run_status: Optional[Union[str, "RunStatus"]] = None,
        status_detail: Optional[str] = None,
        start_time: Optional[datetime.datetime] = None,
        end_time: Optional[datetime.datetime] = None,
        **kwargs
    ):
        """Keywords mirror the writable ``:ivar:`` docs; ``is_terminal_state`` is not settable."""
        super().__init__(**kwargs)
        self.status_code = status_code
        self.run_status = run_status
        self.status_detail = status_detail
        self.start_time = start_time
        self.end_time = end_time
        # Read-only field: initialized to None, filled in by deserialization.
        self.is_terminal_state = None
class PipelineStepRun(msrest.serialization.Model):
    """Lightweight record of one step run within a pipeline.

    :ivar step_name: :vartype step_name: str
    :ivar run_number: :vartype run_number: int
    :ivar run_id: :vartype run_id: str
    :ivar start_time: :vartype start_time: ~datetime.datetime
    :ivar end_time: :vartype end_time: ~datetime.datetime
    :ivar run_status: Possible values include: "NotStarted", "Unapproved",
     "Pausing", "Paused", "Starting", "Preparing", "Queued", "Running",
     "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled".
    :vartype run_status: str or ~flow.models.RunStatus
    :ivar compute_target: :vartype compute_target: str
    :ivar compute_type: :vartype compute_type: str
    :ivar run_type: :vartype run_type: str
    :ivar step_type: :vartype step_type: str
    :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str]
    :ivar is_reused: whether the step result was reused. :vartype is_reused: bool
    :ivar display_name: :vartype display_name: str
    """

    # Wire-name / serializer-type mapping consumed by msrest.
    _attribute_map = {
        'step_name': {'key': 'stepName', 'type': 'str'},
        'run_number': {'key': 'runNumber', 'type': 'int'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'run_status': {'key': 'runStatus', 'type': 'str'},
        'compute_target': {'key': 'computeTarget', 'type': 'str'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'run_type': {'key': 'runType', 'type': 'str'},
        'step_type': {'key': 'stepType', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'is_reused': {'key': 'isReused', 'type': 'bool'},
        'display_name': {'key': 'displayName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        step_name: Optional[str] = None,
        run_number: Optional[int] = None,
        run_id: Optional[str] = None,
        start_time: Optional[datetime.datetime] = None,
        end_time: Optional[datetime.datetime] = None,
        run_status: Optional[Union[str, "RunStatus"]] = None,
        compute_target: Optional[str] = None,
        compute_type: Optional[str] = None,
        run_type: Optional[str] = None,
        step_type: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        is_reused: Optional[bool] = None,
        display_name: Optional[str] = None,
        **kwargs
    ):
        """All keywords mirror the class-level ``:ivar:`` docs; every one is optional."""
        super().__init__(**kwargs)
        self.step_name = step_name
        self.run_number = run_number
        self.run_id = run_id
        self.start_time = start_time
        self.end_time = end_time
        self.run_status = run_status
        self.compute_target = compute_target
        self.compute_type = compute_type
        self.run_type = run_type
        self.step_type = step_type
        self.tags = tags
        self.is_reused = is_reused
        self.display_name = display_name
        :paramtype outputs: dict[str, str]
        :keyword port_outputs: This is a dictionary.
        :paramtype port_outputs: dict[str, ~flow.models.PortOutputInfo]
        """
        super(PipelineStepRunOutputs, self).__init__(**kwargs)
        self.outputs = outputs
        self.port_outputs = port_outputs


class PipelineSubDraft(msrest.serialization.Model):
    """PipelineSubDraft.

    :ivar parent_graph_draft_id:
    :vartype parent_graph_draft_id: str
    :ivar parent_node_id:
    :vartype parent_node_id: str
    :ivar graph_detail:
    :vartype graph_detail: ~flow.models.PipelineRunGraphDetail
    :ivar module_dto:
    :vartype module_dto: ~flow.models.ModuleDto
    :ivar name:
    :vartype name: str
    :ivar last_edited_by:
    :vartype last_edited_by: str
    :ivar created_by:
    :vartype created_by: str
    :ivar description:
    :vartype description: str
    :ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
     "BatchInferencePipeline", "Unknown".
    :vartype pipeline_type: str or ~flow.models.PipelineType
    :ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
    :vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # attribute name -> REST (camelCase) wire key and msrest serialization type.
    _attribute_map = {
        'parent_graph_draft_id': {'key': 'parentGraphDraftId', 'type': 'str'},
        'parent_node_id': {'key': 'parentNodeId', 'type': 'str'},
        'graph_detail': {'key': 'graphDetail', 'type': 'PipelineRunGraphDetail'},
        'module_dto': {'key': 'moduleDto', 'type': 'ModuleDto'},
        'name': {'key': 'name', 'type': 'str'},
        'last_edited_by': {'key': 'lastEditedBy', 'type': 'str'},
        'created_by': {'key': 'createdBy', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
        'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        parent_graph_draft_id: Optional[str] = None,
        parent_node_id: Optional[str] = None,
        graph_detail: Optional["PipelineRunGraphDetail"] = None,
        module_dto: Optional["ModuleDto"] = None,
        name: Optional[str] = None,
        last_edited_by: Optional[str] = None,
        created_by: Optional[str] = None,
        description: Optional[str] = None,
        pipeline_type: Optional[Union[str, "PipelineType"]] = None,
        pipeline_draft_mode: Optional[Union[str, "PipelineDraftMode"]] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        entity_status: Optional[Union[str, "EntityStatus"]] = None,
        id: Optional[str] = None,
        etag: Optional[str] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        **kwargs
    ):
        """
        :keyword parent_graph_draft_id:
        :paramtype parent_graph_draft_id: str
        :keyword parent_node_id:
        :paramtype parent_node_id: str
        :keyword graph_detail:
        :paramtype graph_detail: ~flow.models.PipelineRunGraphDetail
        :keyword module_dto:
        :paramtype module_dto: ~flow.models.ModuleDto
        :keyword name:
        :paramtype name: str
        :keyword last_edited_by:
        :paramtype last_edited_by: str
        :keyword created_by:
        :paramtype created_by: str
        :keyword description:
        :paramtype description: str
        :keyword pipeline_type: Possible values include: "TrainingPipeline",
         "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
        :paramtype pipeline_type: str or ~flow.models.PipelineType
        :keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
        :paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PipelineSubDraft, self).__init__(**kwargs)
        self.parent_graph_draft_id = parent_graph_draft_id
        self.parent_node_id = parent_node_id
        self.graph_detail = graph_detail
        self.module_dto = module_dto
        self.name = name
        self.last_edited_by = last_edited_by
        self.created_by = created_by
        self.description = description
        self.pipeline_type = pipeline_type
        self.pipeline_draft_mode = pipeline_draft_mode
        self.tags = tags
        self.properties = properties
        self.entity_status = entity_status
        self.id = id
        self.etag = etag
        self.created_date = created_date
        self.last_modified_date = last_modified_date


class PolicyValidationResponse(msrest.serialization.Model):
    """PolicyValidationResponse.

    :ivar error_response: The error response.
    :vartype error_response: ~flow.models.ErrorResponse
    :ivar next_action_interval_in_seconds:
    :vartype next_action_interval_in_seconds: int
    :ivar action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
     "SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
    :vartype action_type: str or ~flow.models.ActionType
    """

    # attribute name -> REST (camelCase) wire key and msrest serialization type.
    _attribute_map = {
        'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
        'next_action_interval_in_seconds': {'key': 'nextActionIntervalInSeconds', 'type': 'int'},
        'action_type': {'key': 'actionType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        error_response: Optional["ErrorResponse"] = None,
        next_action_interval_in_seconds: Optional[int] = None,
        action_type: Optional[Union[str, "ActionType"]] = None,
        **kwargs
    ):
        """
        :keyword error_response: The error response.
        :paramtype error_response: ~flow.models.ErrorResponse
        :keyword next_action_interval_in_seconds:
        :paramtype next_action_interval_in_seconds: int
        :keyword action_type: Possible values include: "SendValidationRequest",
         "GetValidationStatus", "SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
        :paramtype action_type: str or ~flow.models.ActionType
        """
        super(PolicyValidationResponse, self).__init__(**kwargs)
        self.error_response = error_response
        self.next_action_interval_in_seconds = next_action_interval_in_seconds
        self.action_type = action_type


class PortInfo(msrest.serialization.Model):
    """PortInfo.

    :ivar node_id:
    :vartype node_id: str
    :ivar port_name:
    :vartype port_name: str
    :ivar graph_port_name:
    :vartype graph_port_name: str
    :ivar is_parameter:
    :vartype is_parameter: bool
    :ivar web_service_port:
    :vartype web_service_port: str
    """

    # attribute name -> REST (camelCase) wire key and msrest serialization type.
    _attribute_map = {
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'port_name': {'key': 'portName', 'type': 'str'},
        'graph_port_name': {'key': 'graphPortName', 'type': 'str'},
        'is_parameter': {'key': 'isParameter', 'type': 'bool'},
        'web_service_port': {'key': 'webServicePort', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        node_id: Optional[str] = None,
        port_name: Optional[str] = None,
        graph_port_name: Optional[str] = None,
        is_parameter: Optional[bool] = None,
        web_service_port: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword node_id:
        :paramtype node_id: str
        :keyword port_name:
        :paramtype port_name: str
        :keyword graph_port_name:
        :paramtype graph_port_name: str
        :keyword is_parameter:
        :paramtype is_parameter: bool
        :keyword web_service_port:
        :paramtype web_service_port: str
        """
        super(PortInfo, self).__init__(**kwargs)
        self.node_id = node_id
        self.port_name = port_name
        self.graph_port_name = graph_port_name
        self.is_parameter = is_parameter
        self.web_service_port = web_service_port


class PortOutputInfo(msrest.serialization.Model):
    """PortOutputInfo.

    :ivar container_uri:
    :vartype container_uri: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar preview_params:
    :vartype preview_params: str
    :ivar model_output_path:
    :vartype model_output_path: str
    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
     "AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
     "AzureMySqlDatabase", "Custom", "Hdfs".
    :vartype data_reference_type: str or ~flow.models.DataReferenceType
    :ivar is_file:
    :vartype is_file: bool
    :ivar supported_actions:
    :vartype supported_actions: list[str or ~flow.models.PortAction]
    """

    # attribute name -> REST (camelCase) wire key and msrest serialization type.
    _attribute_map = {
        'container_uri': {'key': 'containerUri', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'preview_params': {'key': 'previewParams', 'type': 'str'},
        'model_output_path': {'key': 'modelOutputPath', 'type': 'str'},
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'data_reference_type': {'key': 'dataReferenceType', 'type': 'str'},
        'is_file': {'key': 'isFile', 'type': 'bool'},
        'supported_actions': {'key': 'supportedActions', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        container_uri: Optional[str] = None,
        relative_path: Optional[str] = None,
        preview_params: Optional[str] = None,
        model_output_path: Optional[str] = None,
        data_store_name: Optional[str] = None,
        data_reference_type: Optional[Union[str, "DataReferenceType"]] = None,
        is_file: Optional[bool] = None,
        supported_actions: Optional[List[Union[str, "PortAction"]]] = None,
        **kwargs
    ):
        """
        :keyword container_uri:
        :paramtype container_uri: str
        :keyword relative_path:
        :paramtype relative_path: str
        :keyword preview_params:
        :paramtype preview_params: str
        :keyword model_output_path:
        :paramtype model_output_path: str
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
         "AzureFiles", "AzureSqlDatabase",
         "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase", "Custom",
         "Hdfs".
        :paramtype data_reference_type: str or ~flow.models.DataReferenceType
        :keyword is_file:
        :paramtype is_file: bool
        :keyword supported_actions:
        :paramtype supported_actions: list[str or ~flow.models.PortAction]
        """
        super(PortOutputInfo, self).__init__(**kwargs)
        self.container_uri = container_uri
        self.relative_path = relative_path
        self.preview_params = preview_params
        self.model_output_path = model_output_path
        self.data_store_name = data_store_name
        self.data_reference_type = data_reference_type
        self.is_file = is_file
        self.supported_actions = supported_actions


class PriorityConfig(msrest.serialization.Model):
    """PriorityConfig.

    :ivar job_priority:
    :vartype job_priority: int
    :ivar is_preemptible:
    :vartype is_preemptible: bool
    :ivar node_count_set:
    :vartype node_count_set: list[int]
    :ivar scale_interval:
    :vartype scale_interval: int
    """

    # attribute name -> REST (camelCase) wire key and msrest serialization type.
    _attribute_map = {
        'job_priority': {'key': 'jobPriority', 'type': 'int'},
        'is_preemptible': {'key': 'isPreemptible', 'type': 'bool'},
        'node_count_set': {'key': 'nodeCountSet', 'type': '[int]'},
        'scale_interval': {'key': 'scaleInterval', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        job_priority: Optional[int] = None,
        is_preemptible: Optional[bool] = None,
        node_count_set: Optional[List[int]] = None,
        scale_interval: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword job_priority:
        :paramtype job_priority: int
        :keyword is_preemptible:
        :paramtype is_preemptible: bool
        :keyword node_count_set:
        :paramtype node_count_set: list[int]
        :keyword scale_interval:
        :paramtype scale_interval: int
        """
        super(PriorityConfig, self).__init__(**kwargs)
        self.job_priority = job_priority
        self.is_preemptible = is_preemptible
        self.node_count_set = node_count_set
        self.scale_interval = scale_interval


class PriorityConfiguration(msrest.serialization.Model):
    """PriorityConfiguration.

    :ivar cloud_priority:
    :vartype cloud_priority: int
    :ivar string_type_priority:
    :vartype string_type_priority: str
    """

    # attribute name -> REST (camelCase) wire key and msrest serialization type.
    _attribute_map = {
        'cloud_priority': {'key': 'cloudPriority', 'type': 'int'},
        'string_type_priority': {'key': 'stringTypePriority', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        cloud_priority: Optional[int] = None,
        string_type_priority: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword cloud_priority:
        :paramtype cloud_priority: int
        :keyword string_type_priority:
        :paramtype string_type_priority: str
        """
        super(PriorityConfiguration, self).__init__(**kwargs)
        self.cloud_priority = cloud_priority
        self.string_type_priority = string_type_priority


class PromoteDataSetRequest(msrest.serialization.Model):
    """PromoteDataSetRequest.

    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar module_node_id:
    :vartype module_node_id: str
    :ivar step_run_id:
    :vartype step_run_id: str
    :ivar output_port_name:
    :vartype output_port_name: str
    :ivar model_output_path:
    :vartype model_output_path: str
    :ivar data_type_id:
    :vartype data_type_id: str
    :ivar dataset_type:
    :vartype dataset_type: str
    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar output_relative_path:
    :vartype output_relative_path: str
    :ivar pipeline_run_id:
    :vartype pipeline_run_id: str
    :ivar root_pipeline_run_id:
    :vartype root_pipeline_run_id: str
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar experiment_id:
    :vartype experiment_id: str
    """

    # attribute name -> REST (camelCase) wire key and msrest serialization type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
        'step_run_id': {'key': 'stepRunId', 'type': 'str'},
        'output_port_name': {'key': 'outputPortName', 'type': 'str'},
        'model_output_path': {'key': 'modelOutputPath', 'type': 'str'},
        'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
        'dataset_type': {'key': 'datasetType', 'type': 'str'},
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'output_relative_path': {'key': 'outputRelativePath', 'type': 'str'},
        'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
        'root_pipeline_run_id': {'key': 'rootPipelineRunId', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        description: Optional[str] = None,
        module_node_id: Optional[str] = None,
        step_run_id: Optional[str] = None,
        output_port_name: Optional[str] = None,
        model_output_path: Optional[str] = None,
        data_type_id: Optional[str] = None,
        dataset_type: Optional[str] = None,
        data_store_name: Optional[str] = None,
        output_relative_path: Optional[str] = None,
        pipeline_run_id: Optional[str] = None,
        root_pipeline_run_id: Optional[str] = None,
        experiment_name: Optional[str] = None,
        experiment_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword module_node_id:
        :paramtype module_node_id: str
        :keyword step_run_id:
        :paramtype step_run_id: str
        :keyword output_port_name:
        :paramtype output_port_name: str
        :keyword model_output_path:
        :paramtype model_output_path: str
        :keyword data_type_id:
        :paramtype data_type_id: str
        :keyword dataset_type:
        :paramtype dataset_type: str
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword output_relative_path:
        :paramtype output_relative_path: str
        :keyword pipeline_run_id:
        :paramtype pipeline_run_id: str
        :keyword root_pipeline_run_id:
        :paramtype root_pipeline_run_id: str
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword experiment_id:
        :paramtype experiment_id: str
        """
        super(PromoteDataSetRequest, self).__init__(**kwargs)
        self.name = name
        self.description = description
        self.module_node_id = module_node_id
        self.step_run_id = step_run_id
        self.output_port_name = output_port_name
        self.model_output_path = model_output_path
        self.data_type_id = data_type_id
        self.dataset_type = dataset_type
        self.data_store_name = data_store_name
        self.output_relative_path = output_relative_path
        self.pipeline_run_id = pipeline_run_id
        self.root_pipeline_run_id = root_pipeline_run_id
        self.experiment_name = experiment_name
        self.experiment_id = experiment_id


class ProviderEntity(msrest.serialization.Model):
    """ProviderEntity.

    :ivar provider:
    :vartype provider: str
    :ivar module:
    :vartype module: str
    :ivar connection_type:
    :vartype connection_type: list[str or ~flow.models.ConnectionType]
    :ivar apis:
    :vartype apis: list[~flow.models.ApiAndParameters]
    """

    # attribute name -> wire key and msrest serialization type.
    # NOTE: 'connection_type' uses a snake_case wire key here, unlike the camelCase
    # keys elsewhere in this file — this mirrors the service contract as generated.
    _attribute_map = {
        'provider': {'key': 'provider', 'type': 'str'},
        'module': {'key': 'module', 'type': 'str'},
        'connection_type': {'key': 'connection_type', 'type': '[str]'},
        'apis': {'key': 'apis', 'type': '[ApiAndParameters]'},
    }

    def __init__(
        self,
        *,
        provider: Optional[str] = None,
        module: Optional[str] = None,
        connection_type: Optional[List[Union[str, "ConnectionType"]]] = None,
        apis: Optional[List["ApiAndParameters"]] = None,
        **kwargs
    ):
        """
        :keyword provider:
        :paramtype provider: str
        :keyword module:
        :paramtype module: str
        :keyword connection_type:
        :paramtype connection_type: list[str or ~flow.models.ConnectionType]
        :keyword apis:
        :paramtype apis: list[~flow.models.ApiAndParameters]
        """
        super(ProviderEntity, self).__init__(**kwargs)
        self.provider = provider
        self.module = module
        self.connection_type = connection_type
        self.apis = apis


class PublishedPipeline(msrest.serialization.Model):
    """PublishedPipeline.

    :ivar total_run_steps:
    :vartype total_run_steps: int
    :ivar total_runs:
    :vartype total_runs: int
    :ivar parameters: This is a dictionary.
    :vartype parameters: dict[str, str]
    :ivar data_set_definition_value_assignment: This is a dictionary.
    :vartype data_set_definition_value_assignment: dict[str,
     ~flow.models.DataSetDefinitionValue]
    :ivar rest_endpoint:
    :vartype rest_endpoint: str
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar graph_id:
    :vartype graph_id: str
    :ivar published_date:
    :vartype published_date: ~datetime.datetime
    :ivar last_run_time:
    :vartype last_run_time: ~datetime.datetime
    :ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
     "Canceled", "Queued", "CancelRequested".
    :vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
    :ivar published_by:
    :vartype published_by: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar version:
    :vartype version: str
    :ivar is_default:
    :vartype is_default: bool
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # attribute name -> REST (camelCase) wire key and msrest serialization type.
    _attribute_map = {
        'total_run_steps': {'key': 'totalRunSteps', 'type': 'int'},
        'total_runs': {'key': 'totalRuns', 'type': 'int'},
        'parameters': {'key': 'parameters', 'type': '{str}'},
        'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': '{DataSetDefinitionValue}'},
        'rest_endpoint': {'key': 'restEndpoint', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'graph_id': {'key': 'graphId', 'type': 'str'},
        'published_date': {'key': 'publishedDate', 'type': 'iso-8601'},
        'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
        'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
        'published_by': {'key': 'publishedBy', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'version': {'key': 'version', 'type': 'str'},
        'is_default': {'key': 'isDefault', 'type': 'bool'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        total_run_steps: Optional[int] = None,
        total_runs: Optional[int] = None,
        parameters: Optional[Dict[str, str]] = None,
        data_set_definition_value_assignment: Optional[Dict[str, "DataSetDefinitionValue"]] = None,
        rest_endpoint: Optional[str] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        graph_id: Optional[str] = None,
        published_date: Optional[datetime.datetime] = None,
        last_run_time: Optional[datetime.datetime] = None,
        last_run_status: Optional[Union[str, "PipelineRunStatusCode"]] = None,
        published_by: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        version: Optional[str] = None,
        is_default: Optional[bool] = None,
        entity_status: Optional[Union[str, "EntityStatus"]] = None,
        id: Optional[str] = None,
        etag: Optional[str] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        **kwargs
    ):
        """
        :keyword total_run_steps:
        :paramtype total_run_steps: int
        :keyword total_runs:
        :paramtype total_runs: int
        :keyword parameters: This is a dictionary.
        :paramtype parameters: dict[str, str]
        :keyword data_set_definition_value_assignment: This is a dictionary.
        :paramtype data_set_definition_value_assignment: dict[str,
         ~flow.models.DataSetDefinitionValue]
        :keyword rest_endpoint:
        :paramtype rest_endpoint: str
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword graph_id:
        :paramtype graph_id: str
        :keyword published_date:
        :paramtype published_date: ~datetime.datetime
        :keyword last_run_time:
        :paramtype last_run_time: ~datetime.datetime
        :keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
         "Finished", "Canceled", "Queued", "CancelRequested".
        :paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
        :keyword published_by:
        :paramtype published_by: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword version:
        :paramtype version: str
        :keyword is_default:
        :paramtype is_default: bool
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PublishedPipeline, self).__init__(**kwargs)
        self.total_run_steps = total_run_steps
        self.total_runs = total_runs
        self.parameters = parameters
        self.data_set_definition_value_assignment = data_set_definition_value_assignment
        self.rest_endpoint = rest_endpoint
        self.name = name
        self.description = description
        self.graph_id = graph_id
        self.published_date = published_date
        self.last_run_time = last_run_time
        self.last_run_status = last_run_status
        self.published_by = published_by
        self.tags = tags
        self.properties = properties
        self.version = version
        self.is_default = is_default
        self.entity_status = entity_status
        self.id = id
        self.etag = etag
        self.created_date = created_date
        self.last_modified_date = last_modified_date


class PublishedPipelineSummary(msrest.serialization.Model):
    """PublishedPipelineSummary.

    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar graph_id:
    :vartype graph_id: str
    :ivar published_date:
    :vartype published_date: ~datetime.datetime
    :ivar last_run_time:
    :vartype last_run_time: ~datetime.datetime
    :ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
     "Canceled", "Queued", "CancelRequested".
    :vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
    :ivar published_by:
    :vartype published_by: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar version:
    :vartype version: str
    :ivar is_default:
    :vartype is_default: bool
    :ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
    :vartype entity_status: str or ~flow.models.EntityStatus
    :ivar id:
    :vartype id: str
    :ivar etag:
    :vartype etag: str
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    """

    # attribute name -> REST (camelCase) wire key and msrest serialization type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'graph_id': {'key': 'graphId', 'type': 'str'},
        'published_date': {'key': 'publishedDate', 'type': 'iso-8601'},
        'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
        'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
        'published_by': {'key': 'publishedBy', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'version': {'key': 'version', 'type': 'str'},
        'is_default': {'key': 'isDefault', 'type': 'bool'},
        'entity_status': {'key': 'entityStatus', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        description: Optional[str] = None,
        graph_id: Optional[str] = None,
        published_date: Optional[datetime.datetime] = None,
        last_run_time: Optional[datetime.datetime] = None,
        last_run_status: Optional[Union[str, "PipelineRunStatusCode"]] = None,
        published_by: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        version: Optional[str] = None,
        is_default: Optional[bool] = None,
        entity_status: Optional[Union[str, "EntityStatus"]] = None,
        id: Optional[str] = None,
        etag: Optional[str] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword graph_id:
        :paramtype graph_id: str
        :keyword published_date:
        :paramtype published_date: ~datetime.datetime
        :keyword last_run_time:
        :paramtype last_run_time: ~datetime.datetime
        :keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
         "Finished", "Canceled", "Queued", "CancelRequested".
        :paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
        :keyword published_by:
        :paramtype published_by: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword version:
        :paramtype version: str
        :keyword is_default:
        :paramtype is_default: bool
        :keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
        :paramtype entity_status: str or ~flow.models.EntityStatus
        :keyword id:
        :paramtype id: str
        :keyword etag:
        :paramtype etag: str
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        """
        super(PublishedPipelineSummary, self).__init__(**kwargs)
        self.name = name
        self.description = description
        self.graph_id = graph_id
        self.published_date = published_date
        self.last_run_time = last_run_time
        self.last_run_status = last_run_status
        self.published_by = published_by
        self.tags = tags
        self.properties = properties
        self.version = version
        self.is_default = is_default
        self.entity_status = entity_status
        self.id = id
        self.etag = etag
        self.created_date = created_date
        self.last_modified_date = last_modified_date


class PythonInterfaceMapping(msrest.serialization.Model):
    """PythonInterfaceMapping.
:ivar name: :vartype name: str :ivar name_in_yaml: :vartype name_in_yaml: str :ivar argument_name: :vartype argument_name: str :ivar command_line_option: :vartype command_line_option: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'name_in_yaml': {'key': 'nameInYaml', 'type': 'str'}, 'argument_name': {'key': 'argumentName', 'type': 'str'}, 'command_line_option': {'key': 'commandLineOption', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, name_in_yaml: Optional[str] = None, argument_name: Optional[str] = None, command_line_option: Optional[str] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword name_in_yaml: :paramtype name_in_yaml: str :keyword argument_name: :paramtype argument_name: str :keyword command_line_option: :paramtype command_line_option: str """ super(PythonInterfaceMapping, self).__init__(**kwargs) self.name = name self.name_in_yaml = name_in_yaml self.argument_name = argument_name self.command_line_option = command_line_option class PythonPyPiOrRCranLibraryDto(msrest.serialization.Model): """PythonPyPiOrRCranLibraryDto. :ivar package: :vartype package: str :ivar repo: :vartype repo: str """ _attribute_map = { 'package': {'key': 'package', 'type': 'str'}, 'repo': {'key': 'repo', 'type': 'str'}, } def __init__( self, *, package: Optional[str] = None, repo: Optional[str] = None, **kwargs ): """ :keyword package: :paramtype package: str :keyword repo: :paramtype repo: str """ super(PythonPyPiOrRCranLibraryDto, self).__init__(**kwargs) self.package = package self.repo = repo class PythonSection(msrest.serialization.Model): """PythonSection. :ivar interpreter_path: :vartype interpreter_path: str :ivar user_managed_dependencies: :vartype user_managed_dependencies: bool :ivar conda_dependencies: Anything. 
:vartype conda_dependencies: any :ivar base_conda_environment: :vartype base_conda_environment: str """ _attribute_map = { 'interpreter_path': {'key': 'interpreterPath', 'type': 'str'}, 'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'}, 'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'}, 'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'}, } def __init__( self, *, interpreter_path: Optional[str] = None, user_managed_dependencies: Optional[bool] = None, conda_dependencies: Optional[Any] = None, base_conda_environment: Optional[str] = None, **kwargs ): """ :keyword interpreter_path: :paramtype interpreter_path: str :keyword user_managed_dependencies: :paramtype user_managed_dependencies: bool :keyword conda_dependencies: Anything. :paramtype conda_dependencies: any :keyword base_conda_environment: :paramtype base_conda_environment: str """ super(PythonSection, self).__init__(**kwargs) self.interpreter_path = interpreter_path self.user_managed_dependencies = user_managed_dependencies self.conda_dependencies = conda_dependencies self.base_conda_environment = base_conda_environment class PyTorchConfiguration(msrest.serialization.Model): """PyTorchConfiguration. :ivar communication_backend: :vartype communication_backend: str :ivar process_count: :vartype process_count: int """ _attribute_map = { 'communication_backend': {'key': 'communicationBackend', 'type': 'str'}, 'process_count': {'key': 'processCount', 'type': 'int'}, } def __init__( self, *, communication_backend: Optional[str] = None, process_count: Optional[int] = None, **kwargs ): """ :keyword communication_backend: :paramtype communication_backend: str :keyword process_count: :paramtype process_count: int """ super(PyTorchConfiguration, self).__init__(**kwargs) self.communication_backend = communication_backend self.process_count = process_count class QueueingInfo(msrest.serialization.Model): """QueueingInfo. 
    :ivar code:
    :vartype code: str
    :ivar message:
    :vartype message: str
    :ivar last_refresh_timestamp:
    :vartype last_refresh_timestamp: ~datetime.datetime
    """

    # 'iso-8601' tells msrest to (de)serialize the timestamp as an ISO-8601 string.
    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'last_refresh_timestamp': {'key': 'lastRefreshTimestamp', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        code: Optional[str] = None,
        message: Optional[str] = None,
        last_refresh_timestamp: Optional[datetime.datetime] = None,
        **kwargs
    ):
        """
        :keyword code:
        :paramtype code: str
        :keyword message:
        :paramtype message: str
        :keyword last_refresh_timestamp:
        :paramtype last_refresh_timestamp: ~datetime.datetime
        """
        super(QueueingInfo, self).__init__(**kwargs)
        self.code = code
        self.message = message
        self.last_refresh_timestamp = last_refresh_timestamp


class RawComponentDto(msrest.serialization.Model):
    """Raw component metadata: identity/versioning, interface (inputs/outputs), command and environment, and audit fields.

    :ivar component_schema:
    :vartype component_schema: str
    :ivar is_anonymous:
    :vartype is_anonymous: bool
    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    :ivar type: Possible values include: "Unknown", "CommandComponent", "Command".
    :vartype type: str or ~flow.models.ComponentType
    :ivar component_type_version:
    :vartype component_type_version: str
    :ivar display_name:
    :vartype display_name: str
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar is_deterministic:
    :vartype is_deterministic: bool
    :ivar successful_return_code:
    :vartype successful_return_code: str
    :ivar inputs: This is a dictionary.
    :vartype inputs: dict[str, ~flow.models.ComponentInput]
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, ~flow.models.ComponentOutput]
    :ivar command:
    :vartype command: str
    :ivar environment_name:
    :vartype environment_name: str
    :ivar environment_version:
    :vartype environment_version: str
    :ivar snapshot_id:
    :vartype snapshot_id: str
    :ivar created_by:
    :vartype created_by: ~flow.models.SchemaContractsCreatedBy
    :ivar last_modified_by:
    :vartype last_modified_by: ~flow.models.SchemaContractsCreatedBy
    :ivar created_date:
    :vartype created_date: ~datetime.datetime
    :ivar last_modified_date:
    :vartype last_modified_date: ~datetime.datetime
    :ivar component_internal_id:
    :vartype component_internal_id: str
    """

    # Maps Python attribute names to their wire (JSON) keys and msrest types.
    # '{T}' denotes a dict of T; 'iso-8601' an ISO-8601 serialized datetime.
    _attribute_map = {
        'component_schema': {'key': 'componentSchema', 'type': 'str'},
        'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'component_type_version': {'key': 'componentTypeVersion', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
        'successful_return_code': {'key': 'successfulReturnCode', 'type': 'str'},
        'inputs': {'key': 'inputs', 'type': '{ComponentInput}'},
        'outputs': {'key': 'outputs', 'type': '{ComponentOutput}'},
        'command': {'key': 'command', 'type': 'str'},
        'environment_name': {'key': 'environmentName', 'type': 'str'},
        'environment_version': {'key': 'environmentVersion', 'type': 'str'},
        'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
        'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
        'last_modified_by': {'key': 'lastModifiedBy', 'type': 'SchemaContractsCreatedBy'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
        'component_internal_id': {'key': 'componentInternalId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        component_schema: Optional[str] = None,
        is_anonymous: Optional[bool] = None,
        name: Optional[str] = None,
        version: Optional[str] = None,
        type: Optional[Union[str, "ComponentType"]] = None,
        component_type_version: Optional[str] = None,
        display_name: Optional[str] = None,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        is_deterministic: Optional[bool] = None,
        successful_return_code: Optional[str] = None,
        inputs: Optional[Dict[str, "ComponentInput"]] = None,
        outputs: Optional[Dict[str, "ComponentOutput"]] = None,
        command: Optional[str] = None,
        environment_name: Optional[str] = None,
        environment_version: Optional[str] = None,
        snapshot_id: Optional[str] = None,
        created_by: Optional["SchemaContractsCreatedBy"] = None,
        last_modified_by: Optional["SchemaContractsCreatedBy"] = None,
        created_date: Optional[datetime.datetime] = None,
        last_modified_date: Optional[datetime.datetime] = None,
        component_internal_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword component_schema:
        :paramtype component_schema: str
        :keyword is_anonymous:
        :paramtype is_anonymous: bool
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        :keyword type: Possible values include: "Unknown", "CommandComponent", "Command".
        :paramtype type: str or ~flow.models.ComponentType
        :keyword component_type_version:
        :paramtype component_type_version: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword is_deterministic:
        :paramtype is_deterministic: bool
        :keyword successful_return_code:
        :paramtype successful_return_code: str
        :keyword inputs: This is a dictionary.
        :paramtype inputs: dict[str, ~flow.models.ComponentInput]
        :keyword outputs: This is a dictionary.
        :paramtype outputs: dict[str, ~flow.models.ComponentOutput]
        :keyword command:
        :paramtype command: str
        :keyword environment_name:
        :paramtype environment_name: str
        :keyword environment_version:
        :paramtype environment_version: str
        :keyword snapshot_id:
        :paramtype snapshot_id: str
        :keyword created_by:
        :paramtype created_by: ~flow.models.SchemaContractsCreatedBy
        :keyword last_modified_by:
        :paramtype last_modified_by: ~flow.models.SchemaContractsCreatedBy
        :keyword created_date:
        :paramtype created_date: ~datetime.datetime
        :keyword last_modified_date:
        :paramtype last_modified_date: ~datetime.datetime
        :keyword component_internal_id:
        :paramtype component_internal_id: str
        """
        super(RawComponentDto, self).__init__(**kwargs)
        self.component_schema = component_schema
        self.is_anonymous = is_anonymous
        self.name = name
        self.version = version
        self.type = type
        self.component_type_version = component_type_version
        self.display_name = display_name
        self.description = description
        self.tags = tags
        self.properties = properties
        self.is_deterministic = is_deterministic
        self.successful_return_code = successful_return_code
        self.inputs = inputs
        self.outputs = outputs
        self.command = command
        self.environment_name = environment_name
        self.environment_version = environment_version
        self.snapshot_id = snapshot_id
        self.created_by = created_by
        self.last_modified_by = last_modified_by
        self.created_date = created_date
        self.last_modified_date = last_modified_date
        self.component_internal_id = component_internal_id


class RayConfiguration(msrest.serialization.Model):
    """Ray cluster configuration: ports, dashboard settings, and extra head/worker node arguments.
    :ivar port:
    :vartype port: int
    :ivar address:
    :vartype address: str
    :ivar include_dashboard:
    :vartype include_dashboard: bool
    :ivar dashboard_port:
    :vartype dashboard_port: int
    :ivar head_node_additional_args:
    :vartype head_node_additional_args: str
    :ivar worker_node_additional_args:
    :vartype worker_node_additional_args: str
    """

    # Maps Python attribute names to their wire (JSON) keys and msrest types.
    _attribute_map = {
        'port': {'key': 'port', 'type': 'int'},
        'address': {'key': 'address', 'type': 'str'},
        'include_dashboard': {'key': 'includeDashboard', 'type': 'bool'},
        'dashboard_port': {'key': 'dashboardPort', 'type': 'int'},
        'head_node_additional_args': {'key': 'headNodeAdditionalArgs', 'type': 'str'},
        'worker_node_additional_args': {'key': 'workerNodeAdditionalArgs', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        port: Optional[int] = None,
        address: Optional[str] = None,
        include_dashboard: Optional[bool] = None,
        dashboard_port: Optional[int] = None,
        head_node_additional_args: Optional[str] = None,
        worker_node_additional_args: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword port:
        :paramtype port: int
        :keyword address:
        :paramtype address: str
        :keyword include_dashboard:
        :paramtype include_dashboard: bool
        :keyword dashboard_port:
        :paramtype dashboard_port: int
        :keyword head_node_additional_args:
        :paramtype head_node_additional_args: str
        :keyword worker_node_additional_args:
        :paramtype worker_node_additional_args: str
        """
        super(RayConfiguration, self).__init__(**kwargs)
        self.port = port
        self.address = address
        self.include_dashboard = include_dashboard
        self.dashboard_port = dashboard_port
        self.head_node_additional_args = head_node_additional_args
        self.worker_node_additional_args = worker_node_additional_args


class RCranPackage(msrest.serialization.Model):
    """An R CRAN package reference: name, version and repository.
    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    :ivar repository:
    :vartype repository: str
    """

    # Maps Python attribute names to their wire (JSON) keys and msrest types.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'repository': {'key': 'repository', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        version: Optional[str] = None,
        repository: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        :keyword repository:
        :paramtype repository: str
        """
        super(RCranPackage, self).__init__(**kwargs)
        self.name = name
        self.version = version
        self.repository = repository


class RealTimeEndpoint(msrest.serialization.Model):
    """Full real-time endpoint description: state, compute, scaling, auth/keys and audit metadata.

    :ivar created_by:
    :vartype created_by: str
    :ivar kv_tags: Dictionary of :code:`<string>`.
    :vartype kv_tags: dict[str, str]
    :ivar state: Possible values include: "Transitioning", "Healthy", "Unhealthy", "Failed",
     "Unschedulable".
    :vartype state: str or ~flow.models.WebServiceState
    :ivar error:
    :vartype error: ~flow.models.ModelManagementErrorResponse
    :ivar compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT", "AKSENDPOINT",
     "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN".
    :vartype compute_type: str or ~flow.models.ComputeEnvironmentType
    :ivar image_id:
    :vartype image_id: str
    :ivar cpu:
    :vartype cpu: float
    :ivar memory_in_gb:
    :vartype memory_in_gb: float
    :ivar max_concurrent_requests_per_container:
    :vartype max_concurrent_requests_per_container: int
    :ivar num_replicas:
    :vartype num_replicas: int
    :ivar event_hub_enabled:
    :vartype event_hub_enabled: bool
    :ivar storage_enabled:
    :vartype storage_enabled: bool
    :ivar app_insights_enabled:
    :vartype app_insights_enabled: bool
    :ivar auto_scale_enabled:
    :vartype auto_scale_enabled: bool
    :ivar min_replicas:
    :vartype min_replicas: int
    :ivar max_replicas:
    :vartype max_replicas: int
    :ivar target_utilization:
    :vartype target_utilization: int
    :ivar refresh_period_in_seconds:
    :vartype refresh_period_in_seconds: int
    :ivar scoring_uri:
    :vartype scoring_uri: str
    :ivar deployment_status:
    :vartype deployment_status: ~flow.models.AKSReplicaStatus
    :ivar scoring_timeout_ms:
    :vartype scoring_timeout_ms: int
    :ivar auth_enabled:
    :vartype auth_enabled: bool
    :ivar aad_auth_enabled:
    :vartype aad_auth_enabled: bool
    :ivar region:
    :vartype region: str
    :ivar primary_key:
    :vartype primary_key: str
    :ivar secondary_key:
    :vartype secondary_key: str
    :ivar swagger_uri:
    :vartype swagger_uri: str
    :ivar linked_pipeline_draft_id:
    :vartype linked_pipeline_draft_id: str
    :ivar linked_pipeline_run_id:
    :vartype linked_pipeline_run_id: str
    :ivar warning:
    :vartype warning: str
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar id:
    :vartype id: str
    :ivar created_time:
    :vartype created_time: ~datetime.datetime
    :ivar updated_time:
    :vartype updated_time: ~datetime.datetime
    :ivar compute_name:
    :vartype compute_name: str
    :ivar updated_by:
    :vartype updated_by: str
    """

    # Maps Python attribute names to their wire (JSON) keys and msrest types.
    # '{str}' denotes a dict of strings; 'iso-8601' an ISO-8601 serialized datetime.
    _attribute_map = {
        'created_by': {'key': 'createdBy', 'type': 'str'},
        'kv_tags': {'key': 'kvTags', 'type': '{str}'},
        'state': {'key': 'state', 'type': 'str'},
        'error': {'key': 'error', 'type': 'ModelManagementErrorResponse'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'image_id': {'key': 'imageId', 'type': 'str'},
        'cpu': {'key': 'cpu', 'type': 'float'},
        'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
        'max_concurrent_requests_per_container': {'key': 'maxConcurrentRequestsPerContainer', 'type': 'int'},
        'num_replicas': {'key': 'numReplicas', 'type': 'int'},
        'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
        'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
        'auto_scale_enabled': {'key': 'autoScaleEnabled', 'type': 'bool'},
        'min_replicas': {'key': 'minReplicas', 'type': 'int'},
        'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
        'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
        'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
        'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
        'deployment_status': {'key': 'deploymentStatus', 'type': 'AKSReplicaStatus'},
        'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
        'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
        'aad_auth_enabled': {'key': 'aadAuthEnabled', 'type': 'bool'},
        'region': {'key': 'region', 'type': 'str'},
        'primary_key': {'key': 'primaryKey', 'type': 'str'},
        'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
        'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
        'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
        'linked_pipeline_run_id': {'key': 'linkedPipelineRunId', 'type': 'str'},
        'warning': {'key': 'warning', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'updated_time': {'key': 'updatedTime', 'type': 'iso-8601'},
        'compute_name': {'key': 'computeName', 'type': 'str'},
        'updated_by': {'key': 'updatedBy', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        created_by: Optional[str] = None,
        kv_tags: Optional[Dict[str, str]] = None,
        state: Optional[Union[str, "WebServiceState"]] = None,
        error: Optional["ModelManagementErrorResponse"] = None,
        compute_type: Optional[Union[str, "ComputeEnvironmentType"]] = None,
        image_id: Optional[str] = None,
        cpu: Optional[float] = None,
        memory_in_gb: Optional[float] = None,
        max_concurrent_requests_per_container: Optional[int] = None,
        num_replicas: Optional[int] = None,
        event_hub_enabled: Optional[bool] = None,
        storage_enabled: Optional[bool] = None,
        app_insights_enabled: Optional[bool] = None,
        auto_scale_enabled: Optional[bool] = None,
        min_replicas: Optional[int] = None,
        max_replicas: Optional[int] = None,
        target_utilization: Optional[int] = None,
        refresh_period_in_seconds: Optional[int] = None,
        scoring_uri: Optional[str] = None,
        deployment_status: Optional["AKSReplicaStatus"] = None,
        scoring_timeout_ms: Optional[int] = None,
        auth_enabled: Optional[bool] = None,
        aad_auth_enabled: Optional[bool] = None,
        region: Optional[str] = None,
        primary_key: Optional[str] = None,
        secondary_key: Optional[str] = None,
        swagger_uri: Optional[str] = None,
        linked_pipeline_draft_id: Optional[str] = None,
        linked_pipeline_run_id: Optional[str] = None,
        warning: Optional[str] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        id: Optional[str] = None,
        created_time: Optional[datetime.datetime] = None,
        updated_time: Optional[datetime.datetime] = None,
        compute_name: Optional[str] = None,
        updated_by: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword created_by:
        :paramtype created_by: str
        :keyword kv_tags: Dictionary of :code:`<string>`.
        :paramtype kv_tags: dict[str, str]
        :keyword state: Possible values include: "Transitioning", "Healthy", "Unhealthy", "Failed",
         "Unschedulable".
        :paramtype state: str or ~flow.models.WebServiceState
        :keyword error:
        :paramtype error: ~flow.models.ModelManagementErrorResponse
        :keyword compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT",
         "AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE",
         "UNKNOWN".
        :paramtype compute_type: str or ~flow.models.ComputeEnvironmentType
        :keyword image_id:
        :paramtype image_id: str
        :keyword cpu:
        :paramtype cpu: float
        :keyword memory_in_gb:
        :paramtype memory_in_gb: float
        :keyword max_concurrent_requests_per_container:
        :paramtype max_concurrent_requests_per_container: int
        :keyword num_replicas:
        :paramtype num_replicas: int
        :keyword event_hub_enabled:
        :paramtype event_hub_enabled: bool
        :keyword storage_enabled:
        :paramtype storage_enabled: bool
        :keyword app_insights_enabled:
        :paramtype app_insights_enabled: bool
        :keyword auto_scale_enabled:
        :paramtype auto_scale_enabled: bool
        :keyword min_replicas:
        :paramtype min_replicas: int
        :keyword max_replicas:
        :paramtype max_replicas: int
        :keyword target_utilization:
        :paramtype target_utilization: int
        :keyword refresh_period_in_seconds:
        :paramtype refresh_period_in_seconds: int
        :keyword scoring_uri:
        :paramtype scoring_uri: str
        :keyword deployment_status:
        :paramtype deployment_status: ~flow.models.AKSReplicaStatus
        :keyword scoring_timeout_ms:
        :paramtype scoring_timeout_ms: int
        :keyword auth_enabled:
        :paramtype auth_enabled: bool
        :keyword aad_auth_enabled:
        :paramtype aad_auth_enabled: bool
        :keyword region:
        :paramtype region: str
        :keyword primary_key:
        :paramtype primary_key: str
        :keyword secondary_key:
        :paramtype secondary_key: str
        :keyword swagger_uri:
        :paramtype swagger_uri: str
        :keyword linked_pipeline_draft_id:
        :paramtype linked_pipeline_draft_id: str
        :keyword linked_pipeline_run_id:
        :paramtype linked_pipeline_run_id: str
        :keyword warning:
        :paramtype warning: str
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword id:
        :paramtype id: str
        :keyword created_time:
        :paramtype created_time: ~datetime.datetime
        :keyword updated_time:
        :paramtype updated_time: ~datetime.datetime
        :keyword compute_name:
        :paramtype compute_name: str
        :keyword updated_by:
        :paramtype updated_by: str
        """
        super(RealTimeEndpoint, self).__init__(**kwargs)
        self.created_by = created_by
        self.kv_tags = kv_tags
        self.state = state
        self.error = error
        self.compute_type = compute_type
        self.image_id = image_id
        self.cpu = cpu
        self.memory_in_gb = memory_in_gb
        self.max_concurrent_requests_per_container = max_concurrent_requests_per_container
        self.num_replicas = num_replicas
        self.event_hub_enabled = event_hub_enabled
        self.storage_enabled = storage_enabled
        self.app_insights_enabled = app_insights_enabled
        self.auto_scale_enabled = auto_scale_enabled
        self.min_replicas = min_replicas
        self.max_replicas = max_replicas
        self.target_utilization = target_utilization
        self.refresh_period_in_seconds = refresh_period_in_seconds
        self.scoring_uri = scoring_uri
        self.deployment_status = deployment_status
        self.scoring_timeout_ms = scoring_timeout_ms
        self.auth_enabled = auth_enabled
        self.aad_auth_enabled = aad_auth_enabled
        self.region = region
        self.primary_key = primary_key
        self.secondary_key = secondary_key
        self.swagger_uri = swagger_uri
        self.linked_pipeline_draft_id = linked_pipeline_draft_id
        self.linked_pipeline_run_id = linked_pipeline_run_id
        self.warning = warning
        self.name = name
        self.description = description
        self.id = id
        self.created_time = created_time
        self.updated_time = updated_time
        self.compute_name = compute_name
        self.updated_by = updated_by


class RealTimeEndpointInfo(msrest.serialization.Model):
    """Web-service interface of a real-time endpoint: input/output ports and deployment info.
    :ivar web_service_inputs:
    :vartype web_service_inputs: list[~flow.models.WebServicePort]
    :ivar web_service_outputs:
    :vartype web_service_outputs: list[~flow.models.WebServicePort]
    :ivar deployments_info:
    :vartype deployments_info: list[~flow.models.DeploymentInfo]
    """

    # '[T]' denotes a list of T in msrest's type notation.
    _attribute_map = {
        'web_service_inputs': {'key': 'webServiceInputs', 'type': '[WebServicePort]'},
        'web_service_outputs': {'key': 'webServiceOutputs', 'type': '[WebServicePort]'},
        'deployments_info': {'key': 'deploymentsInfo', 'type': '[DeploymentInfo]'},
    }

    def __init__(
        self,
        *,
        web_service_inputs: Optional[List["WebServicePort"]] = None,
        web_service_outputs: Optional[List["WebServicePort"]] = None,
        deployments_info: Optional[List["DeploymentInfo"]] = None,
        **kwargs
    ):
        """
        :keyword web_service_inputs:
        :paramtype web_service_inputs: list[~flow.models.WebServicePort]
        :keyword web_service_outputs:
        :paramtype web_service_outputs: list[~flow.models.WebServicePort]
        :keyword deployments_info:
        :paramtype deployments_info: list[~flow.models.DeploymentInfo]
        """
        super(RealTimeEndpointInfo, self).__init__(**kwargs)
        self.web_service_inputs = web_service_inputs
        self.web_service_outputs = web_service_outputs
        self.deployments_info = deployments_info


class RealTimeEndpointStatus(msrest.serialization.Model):
    """Status of the last operation performed on a real-time endpoint.

    :ivar last_operation: Possible values include: "Create", "Update", "Delete".
    :vartype last_operation: str or ~flow.models.RealTimeEndpointOpCode
    :ivar last_operation_status: Possible values include: "Ongoing", "Succeeded", "Failed",
     "SucceededWithWarning".
    :vartype last_operation_status: str or ~flow.models.RealTimeEndpointOpStatusCode
    :ivar internal_step: Possible values include: "AboutToDeploy", "WaitAksComputeReady",
     "RegisterModels", "CreateServiceFromModels", "UpdateServiceFromModels", "WaitServiceCreating",
     "FetchServiceRelatedInfo", "TestWithSampleData", "AboutToDelete", "DeleteDeployment",
     "DeleteAsset", "DeleteImage", "DeleteModel", "DeleteServiceRecord".
    :vartype internal_step: str or ~flow.models.RealTimeEndpointInternalStepCode
    :ivar status_detail:
    :vartype status_detail: str
    :ivar deployment_state:
    :vartype deployment_state: str
    :ivar service_id:
    :vartype service_id: str
    :ivar linked_pipeline_draft_id:
    :vartype linked_pipeline_draft_id: str
    """

    # Maps Python attribute names to their wire (JSON) keys and msrest types.
    _attribute_map = {
        'last_operation': {'key': 'lastOperation', 'type': 'str'},
        'last_operation_status': {'key': 'lastOperationStatus', 'type': 'str'},
        'internal_step': {'key': 'internalStep', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
        'deployment_state': {'key': 'deploymentState', 'type': 'str'},
        'service_id': {'key': 'serviceId', 'type': 'str'},
        'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        last_operation: Optional[Union[str, "RealTimeEndpointOpCode"]] = None,
        last_operation_status: Optional[Union[str, "RealTimeEndpointOpStatusCode"]] = None,
        internal_step: Optional[Union[str, "RealTimeEndpointInternalStepCode"]] = None,
        status_detail: Optional[str] = None,
        deployment_state: Optional[str] = None,
        service_id: Optional[str] = None,
        linked_pipeline_draft_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword last_operation: Possible values include: "Create", "Update", "Delete".
        :paramtype last_operation: str or ~flow.models.RealTimeEndpointOpCode
        :keyword last_operation_status: Possible values include: "Ongoing", "Succeeded", "Failed",
         "SucceededWithWarning".
        :paramtype last_operation_status: str or ~flow.models.RealTimeEndpointOpStatusCode
        :keyword internal_step: Possible values include: "AboutToDeploy", "WaitAksComputeReady",
         "RegisterModels", "CreateServiceFromModels", "UpdateServiceFromModels",
         "WaitServiceCreating", "FetchServiceRelatedInfo", "TestWithSampleData", "AboutToDelete",
         "DeleteDeployment", "DeleteAsset", "DeleteImage", "DeleteModel", "DeleteServiceRecord".
        :paramtype internal_step: str or ~flow.models.RealTimeEndpointInternalStepCode
        :keyword status_detail:
        :paramtype status_detail: str
        :keyword deployment_state:
        :paramtype deployment_state: str
        :keyword service_id:
        :paramtype service_id: str
        :keyword linked_pipeline_draft_id:
        :paramtype linked_pipeline_draft_id: str
        """
        super(RealTimeEndpointStatus, self).__init__(**kwargs)
        self.last_operation = last_operation
        self.last_operation_status = last_operation_status
        self.internal_step = internal_step
        self.status_detail = status_detail
        self.deployment_state = deployment_state
        self.service_id = service_id
        self.linked_pipeline_draft_id = linked_pipeline_draft_id


class RealTimeEndpointSummary(msrest.serialization.Model):
    """Summary view of a real-time endpoint: identity, timestamps and compute.

    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar id:
    :vartype id: str
    :ivar created_time:
    :vartype created_time: ~datetime.datetime
    :ivar updated_time:
    :vartype updated_time: ~datetime.datetime
    :ivar compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT", "AKSENDPOINT",
     "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN".
    :vartype compute_type: str or ~flow.models.ComputeEnvironmentType
    :ivar compute_name:
    :vartype compute_name: str
    :ivar updated_by:
    :vartype updated_by: str
    """

    # Maps Python attribute names to their wire (JSON) keys and msrest types.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'updated_time': {'key': 'updatedTime', 'type': 'iso-8601'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_name': {'key': 'computeName', 'type': 'str'},
        'updated_by': {'key': 'updatedBy', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        description: Optional[str] = None,
        id: Optional[str] = None,
        created_time: Optional[datetime.datetime] = None,
        updated_time: Optional[datetime.datetime] = None,
        compute_type: Optional[Union[str, "ComputeEnvironmentType"]] = None,
        compute_name: Optional[str] = None,
        updated_by: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword id:
        :paramtype id: str
        :keyword created_time:
        :paramtype created_time: ~datetime.datetime
        :keyword updated_time:
        :paramtype updated_time: ~datetime.datetime
        :keyword compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT",
         "AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE",
         "UNKNOWN".
        :paramtype compute_type: str or ~flow.models.ComputeEnvironmentType
        :keyword compute_name:
        :paramtype compute_name: str
        :keyword updated_by:
        :paramtype updated_by: str
        """
        super(RealTimeEndpointSummary, self).__init__(**kwargs)
        self.name = name
        self.description = description
        self.id = id
        self.created_time = created_time
        self.updated_time = updated_time
        self.compute_type = compute_type
        self.compute_name = compute_name
        self.updated_by = updated_by


class RealTimeEndpointTestRequest(msrest.serialization.Model):
    """Request payload for testing a real-time endpoint: endpoint address, auth key and payload.
    :ivar end_point:
    :vartype end_point: str
    :ivar auth_key:
    :vartype auth_key: str
    :ivar payload:
    :vartype payload: str
    """

    # Maps Python attribute names to their wire (JSON) keys and msrest types.
    _attribute_map = {
        'end_point': {'key': 'endPoint', 'type': 'str'},
        'auth_key': {'key': 'authKey', 'type': 'str'},
        'payload': {'key': 'payload', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        end_point: Optional[str] = None,
        auth_key: Optional[str] = None,
        payload: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword end_point:
        :paramtype end_point: str
        :keyword auth_key:
        :paramtype auth_key: str
        :keyword payload:
        :paramtype payload: str
        """
        super(RealTimeEndpointTestRequest, self).__init__(**kwargs)
        self.end_point = end_point
        self.auth_key = auth_key
        self.payload = payload


class Recurrence(msrest.serialization.Model):
    """Recurrence settings for a schedule: frequency, interval, schedule detail and time window.

    :ivar frequency: Possible values include: "Month", "Week", "Day", "Hour", "Minute".
    :vartype frequency: str or ~flow.models.Frequency
    :ivar interval:
    :vartype interval: int
    :ivar schedule:
    :vartype schedule: ~flow.models.RecurrenceSchedule
    :ivar end_time:
    :vartype end_time: str
    :ivar start_time:
    :vartype start_time: str
    :ivar time_zone:
    :vartype time_zone: str
    """

    _attribute_map = {
        'frequency': {'key': 'frequency', 'type': 'str'},
        'interval': {'key': 'interval', 'type': 'int'},
        'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'},
        'end_time': {'key': 'endTime', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'str'},
        'time_zone': {'key': 'timeZone', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        frequency: Optional[Union[str, "Frequency"]] = None,
        interval: Optional[int] = None,
        schedule: Optional["RecurrenceSchedule"] = None,
        end_time: Optional[str] = None,
        start_time: Optional[str] = None,
        time_zone: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword frequency: Possible values include: "Month", "Week", "Day", "Hour", "Minute".
        :paramtype frequency: str or ~flow.models.Frequency
        :keyword interval:
        :paramtype interval: int
        :keyword schedule:
        :paramtype schedule: ~flow.models.RecurrenceSchedule
        :keyword end_time:
        :paramtype end_time: str
        :keyword start_time:
        :paramtype start_time: str
        :keyword time_zone:
        :paramtype time_zone: str
        """
        super(Recurrence, self).__init__(**kwargs)
        self.frequency = frequency
        self.interval = interval
        self.schedule = schedule
        self.end_time = end_time
        self.start_time = start_time
        self.time_zone = time_zone


class RecurrencePattern(msrest.serialization.Model):
    """Recurrence pattern: the hours, minutes and weekdays on which a schedule fires.

    :ivar hours:
    :vartype hours: list[int]
    :ivar minutes:
    :vartype minutes: list[int]
    :ivar weekdays:
    :vartype weekdays: list[str or ~flow.models.Weekday]
    """

    # Weekday enum values are serialized as plain strings on the wire.
    _attribute_map = {
        'hours': {'key': 'hours', 'type': '[int]'},
        'minutes': {'key': 'minutes', 'type': '[int]'},
        'weekdays': {'key': 'weekdays', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        hours: Optional[List[int]] = None,
        minutes: Optional[List[int]] = None,
        weekdays: Optional[List[Union[str, "Weekday"]]] = None,
        **kwargs
    ):
        """
        :keyword hours:
        :paramtype hours: list[int]
        :keyword minutes:
        :paramtype minutes: list[int]
        :keyword weekdays:
        :paramtype weekdays: list[str or ~flow.models.Weekday]
        """
        super(RecurrencePattern, self).__init__(**kwargs)
        self.hours = hours
        self.minutes = minutes
        self.weekdays = weekdays


class RecurrenceSchedule(msrest.serialization.Model):
    """Recurrence schedule: the hours, minutes, week days and month days on which a schedule fires.
    :ivar hours:
    :vartype hours: list[int]
    :ivar minutes:
    :vartype minutes: list[int]
    :ivar week_days:
    :vartype week_days: list[str or ~flow.models.WeekDays]
    :ivar month_days:
    :vartype month_days: list[int]
    """

    # WeekDays enum values are serialized as plain strings on the wire.
    _attribute_map = {
        'hours': {'key': 'hours', 'type': '[int]'},
        'minutes': {'key': 'minutes', 'type': '[int]'},
        'week_days': {'key': 'weekDays', 'type': '[str]'},
        'month_days': {'key': 'monthDays', 'type': '[int]'},
    }

    def __init__(
        self,
        *,
        hours: Optional[List[int]] = None,
        minutes: Optional[List[int]] = None,
        week_days: Optional[List[Union[str, "WeekDays"]]] = None,
        month_days: Optional[List[int]] = None,
        **kwargs
    ):
        """
        :keyword hours:
        :paramtype hours: list[int]
        :keyword minutes:
        :paramtype minutes: list[int]
        :keyword week_days:
        :paramtype week_days: list[str or ~flow.models.WeekDays]
        :keyword month_days:
        :paramtype month_days: list[int]
        """
        super(RecurrenceSchedule, self).__init__(**kwargs)
        self.hours = hours
        self.minutes = minutes
        self.week_days = week_days
        self.month_days = month_days


class RegenerateServiceKeysRequest(msrest.serialization.Model):
    """Request to regenerate a service key: which key ("Primary"/"Secondary") and an optional value.

    :ivar key_type: Possible values include: "Primary", "Secondary".
    :vartype key_type: str or ~flow.models.KeyType
    :ivar key_value:
    :vartype key_value: str
    """

    _attribute_map = {
        'key_type': {'key': 'keyType', 'type': 'str'},
        'key_value': {'key': 'keyValue', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        key_type: Optional[Union[str, "KeyType"]] = None,
        key_value: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword key_type: Possible values include: "Primary", "Secondary".
        :paramtype key_type: str or ~flow.models.KeyType
        :keyword key_value:
        :paramtype key_value: str
        """
        super(RegenerateServiceKeysRequest, self).__init__(**kwargs)
        self.key_type = key_type
        self.key_value = key_value


class RegisterComponentMetaInfo(msrest.serialization.Model):
    """Metadata payload for registering a component/module version (identity, hashes and registration options).
    :ivar aml_module_name:
    :vartype aml_module_name: str
    :ivar name_only_display_info:
    :vartype name_only_display_info: str
    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    :ivar module_version_id:
    :vartype module_version_id: str
    :ivar snapshot_id:
    :vartype snapshot_id: str
    :ivar component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
     "AnonymousAmlModuleVersion", "ModuleEntityOnly".
    :vartype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
    :ivar module_entity_from_yaml:
    :vartype module_entity_from_yaml: ~flow.models.ModuleEntity
    :ivar set_as_default_version:
    :vartype set_as_default_version: bool
    :ivar data_types_from_yaml:
    :vartype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
    :ivar data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
     "RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
    :vartype data_type_mechanism: str or ~flow.models.DataTypeMechanism
    :ivar identifier_hash:
    :vartype identifier_hash: str
    :ivar identifier_hashes:
    :vartype identifier_hashes: ~flow.models.RegisterComponentMetaInfoIdentifierHashes
    :ivar content_hash:
    :vartype content_hash: str
    :ivar extra_hash:
    :vartype extra_hash: str
    :ivar extra_hashes:
    :vartype extra_hashes: ~flow.models.RegisterComponentMetaInfoExtraHashes
    :ivar registration:
    :vartype registration: bool
    :ivar validate_only:
    :vartype validate_only: bool
    :ivar skip_workspace_related_check:
    :vartype skip_workspace_related_check: bool
    :ivar intellectual_property_protected_workspace_component_registration_allowed_publisher:
    :vartype intellectual_property_protected_workspace_component_registration_allowed_publisher:
     list[str]
    :ivar system_managed_registration:
    :vartype system_managed_registration: bool
    :ivar allow_dup_name_between_input_and_ouput_port:
    :vartype allow_dup_name_between_input_and_ouput_port: bool
    :ivar module_source:
    :vartype module_source: str
    :ivar module_scope:
    :vartype module_scope: str
    :ivar module_additional_includes_count:
    :vartype module_additional_includes_count: int
    :ivar module_os_type:
    :vartype module_os_type: str
    :ivar module_codegen_by:
    :vartype module_codegen_by: str
    :ivar module_client_source:
    :vartype module_client_source: str
    :ivar module_is_builtin:
    :vartype module_is_builtin: bool
    :ivar module_register_event_extension_fields: Dictionary of :code:`<string>`.
    :vartype module_register_event_extension_fields: dict[str, str]
    """

    # Maps each attribute to its wire-format key and msrest type string.
    # NOTE(review): 'ouput' in allow_dup_name_between_input_and_ouput_port mirrors the
    # wire key 'allowDupNameBetweenInputAndOuputPort' — do not "fix" the spelling.
    _attribute_map = {
        'aml_module_name': {'key': 'amlModuleName', 'type': 'str'},
        'name_only_display_info': {'key': 'nameOnlyDisplayInfo', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
        'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
        'component_registration_type': {'key': 'componentRegistrationType', 'type': 'str'},
        'module_entity_from_yaml': {'key': 'moduleEntityFromYaml', 'type': 'ModuleEntity'},
        'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
        'data_types_from_yaml': {'key': 'dataTypesFromYaml', 'type': '[DataTypeCreationInfo]'},
        'data_type_mechanism': {'key': 'dataTypeMechanism', 'type': 'str'},
        'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
        'identifier_hashes': {'key': 'identifierHashes', 'type': 'RegisterComponentMetaInfoIdentifierHashes'},
        'content_hash': {'key': 'contentHash', 'type': 'str'},
        'extra_hash': {'key': 'extraHash', 'type': 'str'},
        'extra_hashes': {'key': 'extraHashes', 'type': 'RegisterComponentMetaInfoExtraHashes'},
        'registration': {'key': 'registration', 'type': 'bool'},
        'validate_only': {'key': 'validateOnly', 'type': 'bool'},
        'skip_workspace_related_check': {'key': 'skipWorkspaceRelatedCheck', 'type': 'bool'},
        'intellectual_property_protected_workspace_component_registration_allowed_publisher': {'key': 'intellectualPropertyProtectedWorkspaceComponentRegistrationAllowedPublisher', 'type': '[str]'},
        'system_managed_registration': {'key': 'systemManagedRegistration', 'type': 'bool'},
        'allow_dup_name_between_input_and_ouput_port': {'key': 'allowDupNameBetweenInputAndOuputPort', 'type': 'bool'},
        'module_source': {'key': 'moduleSource', 'type': 'str'},
        'module_scope': {'key': 'moduleScope', 'type': 'str'},
        'module_additional_includes_count': {'key': 'moduleAdditionalIncludesCount', 'type': 'int'},
        'module_os_type': {'key': 'moduleOSType', 'type': 'str'},
        'module_codegen_by': {'key': 'moduleCodegenBy', 'type': 'str'},
        'module_client_source': {'key': 'moduleClientSource', 'type': 'str'},
        'module_is_builtin': {'key': 'moduleIsBuiltin', 'type': 'bool'},
        'module_register_event_extension_fields': {'key': 'moduleRegisterEventExtensionFields', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        aml_module_name: Optional[str] = None,
        name_only_display_info: Optional[str] = None,
        name: Optional[str] = None,
        version: Optional[str] = None,
        module_version_id: Optional[str] = None,
        snapshot_id: Optional[str] = None,
        component_registration_type: Optional[Union[str, "ComponentRegistrationTypeEnum"]] = None,
        module_entity_from_yaml: Optional["ModuleEntity"] = None,
        set_as_default_version: Optional[bool] = None,
        data_types_from_yaml: Optional[List["DataTypeCreationInfo"]] = None,
        data_type_mechanism: Optional[Union[str, "DataTypeMechanism"]] = None,
        identifier_hash: Optional[str] = None,
        identifier_hashes: Optional["RegisterComponentMetaInfoIdentifierHashes"] = None,
        content_hash: Optional[str] = None,
        extra_hash: Optional[str] = None,
        extra_hashes: Optional["RegisterComponentMetaInfoExtraHashes"] = None,
        registration: Optional[bool] = None,
        validate_only: Optional[bool] = None,
        skip_workspace_related_check: Optional[bool] = None,
        intellectual_property_protected_workspace_component_registration_allowed_publisher: Optional[List[str]] = None,
        system_managed_registration: Optional[bool] = None,
        allow_dup_name_between_input_and_ouput_port: Optional[bool] = None,
        module_source: Optional[str] = None,
        module_scope: Optional[str] = None,
        module_additional_includes_count: Optional[int] = None,
        module_os_type: Optional[str] = None,
        module_codegen_by: Optional[str] = None,
        module_client_source: Optional[str] = None,
        module_is_builtin: Optional[bool] = None,
        module_register_event_extension_fields: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword aml_module_name:
        :paramtype aml_module_name: str
        :keyword name_only_display_info:
        :paramtype name_only_display_info: str
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        :keyword module_version_id:
        :paramtype module_version_id: str
        :keyword snapshot_id:
        :paramtype snapshot_id: str
        :keyword component_registration_type: Possible values include: "Normal",
         "AnonymousAmlModule", "AnonymousAmlModuleVersion", "ModuleEntityOnly".
        :paramtype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
        :keyword module_entity_from_yaml:
        :paramtype module_entity_from_yaml: ~flow.models.ModuleEntity
        :keyword set_as_default_version:
        :paramtype set_as_default_version: bool
        :keyword data_types_from_yaml:
        :paramtype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
        :keyword data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
         "RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
        :paramtype data_type_mechanism: str or ~flow.models.DataTypeMechanism
        :keyword identifier_hash:
        :paramtype identifier_hash: str
        :keyword identifier_hashes:
        :paramtype identifier_hashes: ~flow.models.RegisterComponentMetaInfoIdentifierHashes
        :keyword content_hash:
        :paramtype content_hash: str
        :keyword extra_hash:
        :paramtype extra_hash: str
        :keyword extra_hashes:
        :paramtype extra_hashes: ~flow.models.RegisterComponentMetaInfoExtraHashes
        :keyword registration:
        :paramtype registration: bool
        :keyword validate_only:
        :paramtype validate_only: bool
        :keyword skip_workspace_related_check:
        :paramtype skip_workspace_related_check: bool
        :keyword intellectual_property_protected_workspace_component_registration_allowed_publisher:
        :paramtype intellectual_property_protected_workspace_component_registration_allowed_publisher:
         list[str]
        :keyword system_managed_registration:
        :paramtype system_managed_registration: bool
        :keyword allow_dup_name_between_input_and_ouput_port:
        :paramtype allow_dup_name_between_input_and_ouput_port: bool
        :keyword module_source:
        :paramtype module_source: str
        :keyword module_scope:
        :paramtype module_scope: str
        :keyword module_additional_includes_count:
        :paramtype module_additional_includes_count: int
        :keyword module_os_type:
        :paramtype module_os_type: str
        :keyword module_codegen_by:
        :paramtype module_codegen_by: str
        :keyword module_client_source:
        :paramtype module_client_source: str
        :keyword module_is_builtin:
        :paramtype module_is_builtin: bool
        :keyword module_register_event_extension_fields: Dictionary of :code:`<string>`.
        :paramtype module_register_event_extension_fields: dict[str, str]
        """
        super(RegisterComponentMetaInfo, self).__init__(**kwargs)
        self.aml_module_name = aml_module_name
        self.name_only_display_info = name_only_display_info
        self.name = name
        self.version = version
        self.module_version_id = module_version_id
        self.snapshot_id = snapshot_id
        self.component_registration_type = component_registration_type
        self.module_entity_from_yaml = module_entity_from_yaml
        self.set_as_default_version = set_as_default_version
        self.data_types_from_yaml = data_types_from_yaml
        self.data_type_mechanism = data_type_mechanism
        self.identifier_hash = identifier_hash
        self.identifier_hashes = identifier_hashes
        self.content_hash = content_hash
        self.extra_hash = extra_hash
        self.extra_hashes = extra_hashes
        self.registration = registration
        self.validate_only = validate_only
        self.skip_workspace_related_check = skip_workspace_related_check
        self.intellectual_property_protected_workspace_component_registration_allowed_publisher = intellectual_property_protected_workspace_component_registration_allowed_publisher
        self.system_managed_registration = system_managed_registration
        self.allow_dup_name_between_input_and_ouput_port = allow_dup_name_between_input_and_ouput_port
        self.module_source = module_source
        self.module_scope = module_scope
        self.module_additional_includes_count = module_additional_includes_count
        self.module_os_type = module_os_type
        self.module_codegen_by = module_codegen_by
        self.module_client_source = module_client_source
        self.module_is_builtin = module_is_builtin
        self.module_register_event_extension_fields = module_register_event_extension_fields


class RegisterComponentMetaInfoExtraHashes(msrest.serialization.Model):
    """RegisterComponentMetaInfoExtraHashes.

    :ivar identifier_hash:
    :vartype identifier_hash: str
    :ivar identifier_hash_v2:
    :vartype identifier_hash_v2: str
    """

    # Maps each attribute to its wire-format key and msrest type string.
    # NOTE(review): wire keys here are PascalCase ('IdentifierHash'), unlike the
    # camelCase keys used elsewhere in this module — preserved from the service contract.
    _attribute_map = {
        'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
        'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        identifier_hash: Optional[str] = None,
        identifier_hash_v2: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword identifier_hash:
        :paramtype identifier_hash: str
        :keyword identifier_hash_v2:
        :paramtype identifier_hash_v2: str
        """
        super(RegisterComponentMetaInfoExtraHashes, self).__init__(**kwargs)
        self.identifier_hash = identifier_hash
        self.identifier_hash_v2 = identifier_hash_v2


class RegisterComponentMetaInfoIdentifierHashes(msrest.serialization.Model):
    """RegisterComponentMetaInfoIdentifierHashes.

    :ivar identifier_hash:
    :vartype identifier_hash: str
    :ivar identifier_hash_v2:
    :vartype identifier_hash_v2: str
    """

    # Maps each attribute to its wire-format key and msrest type string.
    _attribute_map = {
        'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
        'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        identifier_hash: Optional[str] = None,
        identifier_hash_v2: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword identifier_hash:
        :paramtype identifier_hash: str
        :keyword identifier_hash_v2:
        :paramtype identifier_hash_v2: str
        """
        super(RegisterComponentMetaInfoIdentifierHashes, self).__init__(**kwargs)
        self.identifier_hash = identifier_hash
        self.identifier_hash_v2 = identifier_hash_v2


class RegisteredDataSetReference(msrest.serialization.Model):
    """RegisteredDataSetReference.

    :ivar id:
    :vartype id: str
    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    """

    # Maps each attribute to its wire-format key and msrest type string.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # noqa: A002 - parameter name fixed by the wire contract, shadows builtin
        name: Optional[str] = None,
        version: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        """
        super(RegisteredDataSetReference, self).__init__(**kwargs)
        self.id = id
        self.name = name
        self.version = version


class RegisterRegistryComponentMetaInfo(msrest.serialization.Model):
    """RegisterRegistryComponentMetaInfo.

    :ivar registry_name:
    :vartype registry_name: str
    :ivar intellectual_property_publisher_information:
    :vartype intellectual_property_publisher_information:
     ~flow.models.IntellectualPropertyPublisherInformation
    :ivar blob_reference_data: This is a dictionary.
    :vartype blob_reference_data: dict[str, ~flow.models.RegistryBlobReferenceData]
    :ivar aml_module_name:
    :vartype aml_module_name: str
    :ivar name_only_display_info:
    :vartype name_only_display_info: str
    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    :ivar module_version_id:
    :vartype module_version_id: str
    :ivar snapshot_id:
    :vartype snapshot_id: str
    :ivar component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
     "AnonymousAmlModuleVersion", "ModuleEntityOnly".
    :vartype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
    :ivar module_entity_from_yaml:
    :vartype module_entity_from_yaml: ~flow.models.ModuleEntity
    :ivar set_as_default_version:
    :vartype set_as_default_version: bool
    :ivar data_types_from_yaml:
    :vartype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
    :ivar data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
     "RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
    :vartype data_type_mechanism: str or ~flow.models.DataTypeMechanism
    :ivar identifier_hash:
    :vartype identifier_hash: str
    :ivar identifier_hashes:
    :vartype identifier_hashes: ~flow.models.RegisterRegistryComponentMetaInfoIdentifierHashes
    :ivar content_hash:
    :vartype content_hash: str
    :ivar extra_hash:
    :vartype extra_hash: str
    :ivar extra_hashes:
    :vartype extra_hashes: ~flow.models.RegisterRegistryComponentMetaInfoExtraHashes
    :ivar registration:
    :vartype registration: bool
    :ivar validate_only:
    :vartype validate_only: bool
    :ivar skip_workspace_related_check:
    :vartype skip_workspace_related_check: bool
    :ivar intellectual_property_protected_workspace_component_registration_allowed_publisher:
    :vartype intellectual_property_protected_workspace_component_registration_allowed_publisher:
     list[str]
    :ivar system_managed_registration:
    :vartype system_managed_registration: bool
    :ivar allow_dup_name_between_input_and_ouput_port:
    :vartype allow_dup_name_between_input_and_ouput_port: bool
    :ivar module_source:
    :vartype module_source: str
    :ivar module_scope:
    :vartype module_scope: str
    :ivar module_additional_includes_count:
    :vartype module_additional_includes_count: int
    :ivar module_os_type:
    :vartype module_os_type: str
    :ivar module_codegen_by:
    :vartype module_codegen_by: str
    :ivar module_client_source:
    :vartype module_client_source: str
    :ivar module_is_builtin:
    :vartype module_is_builtin: bool
    :ivar module_register_event_extension_fields: Dictionary of :code:`<string>`.
    :vartype module_register_event_extension_fields: dict[str, str]
    """

    # Maps each attribute to its wire-format key and msrest type string.
    # Same field set as RegisterComponentMetaInfo plus the registry-specific fields
    # (registry_name, intellectual_property_publisher_information, blob_reference_data).
    _attribute_map = {
        'registry_name': {'key': 'registryName', 'type': 'str'},
        'intellectual_property_publisher_information': {'key': 'intellectualPropertyPublisherInformation', 'type': 'IntellectualPropertyPublisherInformation'},
        'blob_reference_data': {'key': 'blobReferenceData', 'type': '{RegistryBlobReferenceData}'},
        'aml_module_name': {'key': 'amlModuleName', 'type': 'str'},
        'name_only_display_info': {'key': 'nameOnlyDisplayInfo', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
        'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
        'component_registration_type': {'key': 'componentRegistrationType', 'type': 'str'},
        'module_entity_from_yaml': {'key': 'moduleEntityFromYaml', 'type': 'ModuleEntity'},
        'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
        'data_types_from_yaml': {'key': 'dataTypesFromYaml', 'type': '[DataTypeCreationInfo]'},
        'data_type_mechanism': {'key': 'dataTypeMechanism', 'type': 'str'},
        'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
        'identifier_hashes': {'key': 'identifierHashes', 'type': 'RegisterRegistryComponentMetaInfoIdentifierHashes'},
        'content_hash': {'key': 'contentHash', 'type': 'str'},
        'extra_hash': {'key': 'extraHash', 'type': 'str'},
        'extra_hashes': {'key': 'extraHashes', 'type': 'RegisterRegistryComponentMetaInfoExtraHashes'},
        'registration': {'key': 'registration', 'type': 'bool'},
        'validate_only': {'key': 'validateOnly', 'type': 'bool'},
        'skip_workspace_related_check': {'key': 'skipWorkspaceRelatedCheck', 'type': 'bool'},
        'intellectual_property_protected_workspace_component_registration_allowed_publisher': {'key': 'intellectualPropertyProtectedWorkspaceComponentRegistrationAllowedPublisher', 'type': '[str]'},
        'system_managed_registration': {'key': 'systemManagedRegistration', 'type': 'bool'},
        'allow_dup_name_between_input_and_ouput_port': {'key': 'allowDupNameBetweenInputAndOuputPort', 'type': 'bool'},
        'module_source': {'key': 'moduleSource', 'type': 'str'},
        'module_scope': {'key': 'moduleScope', 'type': 'str'},
        'module_additional_includes_count': {'key': 'moduleAdditionalIncludesCount', 'type': 'int'},
        'module_os_type': {'key': 'moduleOSType', 'type': 'str'},
        'module_codegen_by': {'key': 'moduleCodegenBy', 'type': 'str'},
        'module_client_source': {'key': 'moduleClientSource', 'type': 'str'},
        'module_is_builtin': {'key': 'moduleIsBuiltin', 'type': 'bool'},
        'module_register_event_extension_fields': {'key': 'moduleRegisterEventExtensionFields', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        registry_name: Optional[str] = None,
        intellectual_property_publisher_information: Optional["IntellectualPropertyPublisherInformation"] = None,
        blob_reference_data: Optional[Dict[str, "RegistryBlobReferenceData"]] = None,
        aml_module_name: Optional[str] = None,
        name_only_display_info: Optional[str] = None,
        name: Optional[str] = None,
        version: Optional[str] = None,
        module_version_id: Optional[str] = None,
        snapshot_id: Optional[str] = None,
        component_registration_type: Optional[Union[str, "ComponentRegistrationTypeEnum"]] = None,
        module_entity_from_yaml: Optional["ModuleEntity"] = None,
        set_as_default_version: Optional[bool] = None,
        data_types_from_yaml: Optional[List["DataTypeCreationInfo"]] = None,
        data_type_mechanism: Optional[Union[str, "DataTypeMechanism"]] = None,
        identifier_hash: Optional[str] = None,
        identifier_hashes: Optional["RegisterRegistryComponentMetaInfoIdentifierHashes"] = None,
        content_hash: Optional[str] = None,
        extra_hash: Optional[str] = None,
        extra_hashes: Optional["RegisterRegistryComponentMetaInfoExtraHashes"] = None,
        registration: Optional[bool] = None,
        validate_only: Optional[bool] = None,
        skip_workspace_related_check: Optional[bool] = None,
        intellectual_property_protected_workspace_component_registration_allowed_publisher: Optional[List[str]] = None,
        system_managed_registration: Optional[bool] = None,
        allow_dup_name_between_input_and_ouput_port: Optional[bool] = None,
        module_source: Optional[str] = None,
        module_scope: Optional[str] = None,
        module_additional_includes_count: Optional[int] = None,
        module_os_type: Optional[str] = None,
        module_codegen_by: Optional[str] = None,
        module_client_source: Optional[str] = None,
        module_is_builtin: Optional[bool] = None,
        module_register_event_extension_fields: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword registry_name:
        :paramtype registry_name: str
        :keyword intellectual_property_publisher_information:
        :paramtype intellectual_property_publisher_information:
         ~flow.models.IntellectualPropertyPublisherInformation
        :keyword blob_reference_data: This is a dictionary.
        :paramtype blob_reference_data: dict[str, ~flow.models.RegistryBlobReferenceData]
        :keyword aml_module_name:
        :paramtype aml_module_name: str
        :keyword name_only_display_info:
        :paramtype name_only_display_info: str
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        :keyword module_version_id:
        :paramtype module_version_id: str
        :keyword snapshot_id:
        :paramtype snapshot_id: str
        :keyword component_registration_type: Possible values include: "Normal",
         "AnonymousAmlModule", "AnonymousAmlModuleVersion", "ModuleEntityOnly".
        :paramtype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
        :keyword module_entity_from_yaml:
        :paramtype module_entity_from_yaml: ~flow.models.ModuleEntity
        :keyword set_as_default_version:
        :paramtype set_as_default_version: bool
        :keyword data_types_from_yaml:
        :paramtype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
        :keyword data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
         "RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
        :paramtype data_type_mechanism: str or ~flow.models.DataTypeMechanism
        :keyword identifier_hash:
        :paramtype identifier_hash: str
        :keyword identifier_hashes:
        :paramtype identifier_hashes:
         ~flow.models.RegisterRegistryComponentMetaInfoIdentifierHashes
        :keyword content_hash:
        :paramtype content_hash: str
        :keyword extra_hash:
        :paramtype extra_hash: str
        :keyword extra_hashes:
        :paramtype extra_hashes: ~flow.models.RegisterRegistryComponentMetaInfoExtraHashes
        :keyword registration:
        :paramtype registration: bool
        :keyword validate_only:
        :paramtype validate_only: bool
        :keyword skip_workspace_related_check:
        :paramtype skip_workspace_related_check: bool
        :keyword intellectual_property_protected_workspace_component_registration_allowed_publisher:
        :paramtype intellectual_property_protected_workspace_component_registration_allowed_publisher:
         list[str]
        :keyword system_managed_registration:
        :paramtype system_managed_registration: bool
        :keyword allow_dup_name_between_input_and_ouput_port:
        :paramtype allow_dup_name_between_input_and_ouput_port: bool
        :keyword module_source:
        :paramtype module_source: str
        :keyword module_scope:
        :paramtype module_scope: str
        :keyword module_additional_includes_count:
        :paramtype module_additional_includes_count: int
        :keyword module_os_type:
        :paramtype module_os_type: str
        :keyword module_codegen_by:
        :paramtype module_codegen_by: str
        :keyword module_client_source:
        :paramtype module_client_source: str
        :keyword module_is_builtin:
        :paramtype module_is_builtin: bool
        :keyword module_register_event_extension_fields: Dictionary of :code:`<string>`.
        :paramtype module_register_event_extension_fields: dict[str, str]
        """
        super(RegisterRegistryComponentMetaInfo, self).__init__(**kwargs)
        self.registry_name = registry_name
        self.intellectual_property_publisher_information = intellectual_property_publisher_information
        self.blob_reference_data = blob_reference_data
        self.aml_module_name = aml_module_name
        self.name_only_display_info = name_only_display_info
        self.name = name
        self.version = version
        self.module_version_id = module_version_id
        self.snapshot_id = snapshot_id
        self.component_registration_type = component_registration_type
        self.module_entity_from_yaml = module_entity_from_yaml
        self.set_as_default_version = set_as_default_version
        self.data_types_from_yaml = data_types_from_yaml
        self.data_type_mechanism = data_type_mechanism
        self.identifier_hash = identifier_hash
        self.identifier_hashes = identifier_hashes
        self.content_hash = content_hash
        self.extra_hash = extra_hash
        self.extra_hashes = extra_hashes
        self.registration = registration
        self.validate_only = validate_only
        self.skip_workspace_related_check = skip_workspace_related_check
        self.intellectual_property_protected_workspace_component_registration_allowed_publisher = intellectual_property_protected_workspace_component_registration_allowed_publisher
        self.system_managed_registration = system_managed_registration
        self.allow_dup_name_between_input_and_ouput_port = allow_dup_name_between_input_and_ouput_port
        self.module_source = module_source
        self.module_scope = module_scope
        self.module_additional_includes_count = module_additional_includes_count
        self.module_os_type = module_os_type
        self.module_codegen_by = module_codegen_by
        self.module_client_source = module_client_source
        self.module_is_builtin = module_is_builtin
        self.module_register_event_extension_fields = module_register_event_extension_fields


class RegisterRegistryComponentMetaInfoExtraHashes(msrest.serialization.Model):
    """RegisterRegistryComponentMetaInfoExtraHashes.

    :ivar identifier_hash:
    :vartype identifier_hash: str
    :ivar identifier_hash_v2:
    :vartype identifier_hash_v2: str
    """

    # Maps each attribute to its wire-format key and msrest type string.
    _attribute_map = {
        'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
        'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        identifier_hash: Optional[str] = None,
        identifier_hash_v2: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword identifier_hash:
        :paramtype identifier_hash: str
        :keyword identifier_hash_v2:
        :paramtype identifier_hash_v2: str
        """
        super(RegisterRegistryComponentMetaInfoExtraHashes, self).__init__(**kwargs)
        self.identifier_hash = identifier_hash
        self.identifier_hash_v2 = identifier_hash_v2


class RegisterRegistryComponentMetaInfoIdentifierHashes(msrest.serialization.Model):
    """RegisterRegistryComponentMetaInfoIdentifierHashes.

    :ivar identifier_hash:
    :vartype identifier_hash: str
    :ivar identifier_hash_v2:
    :vartype identifier_hash_v2: str
    """

    # Maps each attribute to its wire-format key and msrest type string.
    _attribute_map = {
        'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
        'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        identifier_hash: Optional[str] = None,
        identifier_hash_v2: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword identifier_hash:
        :paramtype identifier_hash: str
        :keyword identifier_hash_v2:
        :paramtype identifier_hash_v2: str
        """
        super(RegisterRegistryComponentMetaInfoIdentifierHashes, self).__init__(**kwargs)
        self.identifier_hash = identifier_hash
        self.identifier_hash_v2 = identifier_hash_v2


class RegistrationOptions(msrest.serialization.Model):
    """RegistrationOptions.

    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar properties: Dictionary of :code:`<string>`.
    :vartype properties: dict[str, str]
    :ivar dataset_registration_options:
    :vartype dataset_registration_options: ~flow.models.DatasetRegistrationOptions
    """

    # Maps each attribute to its wire-format key and msrest type string.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'dataset_registration_options': {'key': 'datasetRegistrationOptions', 'type': 'DatasetRegistrationOptions'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        version: Optional[str] = None,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        dataset_registration_options: Optional["DatasetRegistrationOptions"] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword properties: Dictionary of :code:`<string>`.
        :paramtype properties: dict[str, str]
        :keyword dataset_registration_options:
        :paramtype dataset_registration_options: ~flow.models.DatasetRegistrationOptions
        """
        super(RegistrationOptions, self).__init__(**kwargs)
        self.name = name
        self.version = version
        self.description = description
        self.tags = tags
        self.properties = properties
        self.dataset_registration_options = dataset_registration_options


class RegistryBlobReferenceData(msrest.serialization.Model):
    """RegistryBlobReferenceData.

    :ivar data_reference_id:
    :vartype data_reference_id: str
    :ivar data:
    :vartype data: str
    """

    # Maps each attribute to its wire-format key and msrest type string.
    _attribute_map = {
        'data_reference_id': {'key': 'dataReferenceId', 'type': 'str'},
        'data': {'key': 'data', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        data_reference_id: Optional[str] = None,
        data: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword data_reference_id:
        :paramtype data_reference_id: str
        :keyword data:
        :paramtype data: str
        """
        super(RegistryBlobReferenceData, self).__init__(**kwargs)
        self.data_reference_id = data_reference_id
        self.data = data


class RegistryIdentity(msrest.serialization.Model):
    """RegistryIdentity.

    :ivar resource_id:
    :vartype resource_id: str
    :ivar client_id:
    :vartype client_id: str
    """

    # Maps each attribute to its wire-format key and msrest type string.
    _attribute_map = {
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'client_id': {'key': 'clientId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        resource_id: Optional[str] = None,
        client_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword resource_id:
        :paramtype resource_id: str
        :keyword client_id:
        :paramtype client_id: str
        """
        super(RegistryIdentity, self).__init__(**kwargs)
        self.resource_id = resource_id
        self.client_id = client_id


class Relationship(msrest.serialization.Model):
    """Relationship.

    Variables are only populated by the server, and will be ignored when sending a request.

:ivar relation_type: :vartype relation_type: str :ivar target_entity_id: :vartype target_entity_id: str :ivar asset_id: :vartype asset_id: str :ivar entity_type: :vartype entity_type: str :ivar direction: :vartype direction: str :ivar entity_container_id: :vartype entity_container_id: str """ _validation = { 'entity_type': {'readonly': True}, 'entity_container_id': {'readonly': True}, } _attribute_map = { 'relation_type': {'key': 'relationType', 'type': 'str'}, 'target_entity_id': {'key': 'targetEntityId', 'type': 'str'}, 'asset_id': {'key': 'assetId', 'type': 'str'}, 'entity_type': {'key': 'entityType', 'type': 'str'}, 'direction': {'key': 'direction', 'type': 'str'}, 'entity_container_id': {'key': 'entityContainerId', 'type': 'str'}, } def __init__( self, *, relation_type: Optional[str] = None, target_entity_id: Optional[str] = None, asset_id: Optional[str] = None, direction: Optional[str] = None, **kwargs ): """ :keyword relation_type: :paramtype relation_type: str :keyword target_entity_id: :paramtype target_entity_id: str :keyword asset_id: :paramtype asset_id: str :keyword direction: :paramtype direction: str """ super(Relationship, self).__init__(**kwargs) self.relation_type = relation_type self.target_entity_id = target_entity_id self.asset_id = asset_id self.entity_type = None self.direction = direction self.entity_container_id = None class RemoteDockerComputeInfo(msrest.serialization.Model): """RemoteDockerComputeInfo. 
:ivar address: :vartype address: str :ivar username: :vartype username: str :ivar password: :vartype password: str :ivar private_key: :vartype private_key: str """ _attribute_map = { 'address': {'key': 'address', 'type': 'str'}, 'username': {'key': 'username', 'type': 'str'}, 'password': {'key': 'password', 'type': 'str'}, 'private_key': {'key': 'privateKey', 'type': 'str'}, } def __init__( self, *, address: Optional[str] = None, username: Optional[str] = None, password: Optional[str] = None, private_key: Optional[str] = None, **kwargs ): """ :keyword address: :paramtype address: str :keyword username: :paramtype username: str :keyword password: :paramtype password: str :keyword private_key: :paramtype private_key: str """ super(RemoteDockerComputeInfo, self).__init__(**kwargs) self.address = address self.username = username self.password = password self.private_key = private_key class ResourceConfig(msrest.serialization.Model): """ResourceConfig. :ivar gpu_count: :vartype gpu_count: int :ivar cpu_count: :vartype cpu_count: int :ivar memory_request_in_gb: :vartype memory_request_in_gb: int """ _attribute_map = { 'gpu_count': {'key': 'gpuCount', 'type': 'int'}, 'cpu_count': {'key': 'cpuCount', 'type': 'int'}, 'memory_request_in_gb': {'key': 'memoryRequestInGB', 'type': 'int'}, } def __init__( self, *, gpu_count: Optional[int] = None, cpu_count: Optional[int] = None, memory_request_in_gb: Optional[int] = None, **kwargs ): """ :keyword gpu_count: :paramtype gpu_count: int :keyword cpu_count: :paramtype cpu_count: int :keyword memory_request_in_gb: :paramtype memory_request_in_gb: int """ super(ResourceConfig, self).__init__(**kwargs) self.gpu_count = gpu_count self.cpu_count = cpu_count self.memory_request_in_gb = memory_request_in_gb class ResourceConfiguration(msrest.serialization.Model): """ResourceConfiguration. 
:ivar gpu_count: :vartype gpu_count: int :ivar cpu_count: :vartype cpu_count: int :ivar memory_request_in_gb: :vartype memory_request_in_gb: int """ _attribute_map = { 'gpu_count': {'key': 'gpuCount', 'type': 'int'}, 'cpu_count': {'key': 'cpuCount', 'type': 'int'}, 'memory_request_in_gb': {'key': 'memoryRequestInGB', 'type': 'int'}, } def __init__( self, *, gpu_count: Optional[int] = None, cpu_count: Optional[int] = None, memory_request_in_gb: Optional[int] = None, **kwargs ): """ :keyword gpu_count: :paramtype gpu_count: int :keyword cpu_count: :paramtype cpu_count: int :keyword memory_request_in_gb: :paramtype memory_request_in_gb: int """ super(ResourceConfiguration, self).__init__(**kwargs) self.gpu_count = gpu_count self.cpu_count = cpu_count self.memory_request_in_gb = memory_request_in_gb class ResourcesSetting(msrest.serialization.Model): """ResourcesSetting. :ivar instance_size: :vartype instance_size: str :ivar spark_version: :vartype spark_version: str """ _attribute_map = { 'instance_size': {'key': 'instanceSize', 'type': 'str'}, 'spark_version': {'key': 'sparkVersion', 'type': 'str'}, } def __init__( self, *, instance_size: Optional[str] = None, spark_version: Optional[str] = None, **kwargs ): """ :keyword instance_size: :paramtype instance_size: str :keyword spark_version: :paramtype spark_version: str """ super(ResourcesSetting, self).__init__(**kwargs) self.instance_size = instance_size self.spark_version = spark_version class RetrieveToolFuncResultRequest(msrest.serialization.Model): """RetrieveToolFuncResultRequest. :ivar func_path: :vartype func_path: str :ivar func_kwargs: This is a dictionary. :vartype func_kwargs: dict[str, any] :ivar func_call_scenario: Possible values include: "generated_by", "reverse_generated_by", "dynamic_list". 
    :vartype func_call_scenario: str or ~flow.models.ToolFuncCallScenario
    """

    # msrest wire mapping; note these keys stay snake_case on the wire (unlike most models).
    _attribute_map = {
        'func_path': {'key': 'func_path', 'type': 'str'},
        'func_kwargs': {'key': 'func_kwargs', 'type': '{object}'},
        'func_call_scenario': {'key': 'func_call_scenario', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        func_path: Optional[str] = None,
        func_kwargs: Optional[Dict[str, Any]] = None,
        func_call_scenario: Optional[Union[str, "ToolFuncCallScenario"]] = None,
        **kwargs
    ):
        """
        :keyword func_path:
        :paramtype func_path: str
        :keyword func_kwargs: This is a dictionary.
        :paramtype func_kwargs: dict[str, any]
        :keyword func_call_scenario: Possible values include: "generated_by",
         "reverse_generated_by", "dynamic_list".
        :paramtype func_call_scenario: str or ~flow.models.ToolFuncCallScenario
        """
        super(RetrieveToolFuncResultRequest, self).__init__(**kwargs)
        self.func_path = func_path
        self.func_kwargs = func_kwargs
        self.func_call_scenario = func_call_scenario


class RetryConfiguration(msrest.serialization.Model):
    """RetryConfiguration.

    AutoRest-generated model: all fields are optional keyword arguments.

    :ivar max_retry_count:
    :vartype max_retry_count: int
    """

    # msrest wire mapping: python attribute name -> REST JSON key and serialized type.
    _attribute_map = {
        'max_retry_count': {'key': 'maxRetryCount', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        max_retry_count: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword max_retry_count:
        :paramtype max_retry_count: int
        """
        super(RetryConfiguration, self).__init__(**kwargs)
        self.max_retry_count = max_retry_count


class RGitHubPackage(msrest.serialization.Model):
    """RGitHubPackage.
    :ivar repository:
    :vartype repository: str
    :ivar auth_token:
    :vartype auth_token: str
    """

    # msrest wire mapping: python attribute name -> REST JSON key and serialized type.
    _attribute_map = {
        'repository': {'key': 'repository', 'type': 'str'},
        'auth_token': {'key': 'authToken', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        repository: Optional[str] = None,
        auth_token: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword repository:
        :paramtype repository: str
        :keyword auth_token:
        :paramtype auth_token: str
        """
        super(RGitHubPackage, self).__init__(**kwargs)
        self.repository = repository
        self.auth_token = auth_token


class RootError(msrest.serialization.Model):
    """The root error.

    :ivar code: The service-defined error code. Supported error codes: ServiceError, UserError,
     ValidationError, AzureStorageError, TransientError, RequestThrottled.
    :vartype code: str
    :ivar severity: The Severity of error.
    :vartype severity: int
    :ivar message: A human-readable representation of the error.
    :vartype message: str
    :ivar message_format: An unformatted version of the message with no variable substitution.
    :vartype message_format: str
    :ivar message_parameters: Value substitutions corresponding to the contents of MessageFormat.
    :vartype message_parameters: dict[str, str]
    :ivar reference_code: This code can optionally be set by the system generating the error.
     It should be used to classify the problem and identify the module and code area where the
     failure occurred.
    :vartype reference_code: str
    :ivar details_uri: A URI which points to more details about the context of the error.
    :vartype details_uri: str
    :ivar target: The target of the error (e.g., the name of the property in error).
    :vartype target: str
    :ivar details: The related errors that occurred during the request.
    :vartype details: list[~flow.models.RootError]
    :ivar inner_error: A nested structure of errors.
    :vartype inner_error: ~flow.models.InnerErrorResponse
    :ivar additional_info: The error additional info.
    :vartype additional_info: list[~flow.models.ErrorAdditionalInfo]
    """

    # msrest wire mapping; 'details' is recursive (a RootError may nest further RootErrors).
    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'severity': {'key': 'severity', 'type': 'int'},
        'message': {'key': 'message', 'type': 'str'},
        'message_format': {'key': 'messageFormat', 'type': 'str'},
        'message_parameters': {'key': 'messageParameters', 'type': '{str}'},
        'reference_code': {'key': 'referenceCode', 'type': 'str'},
        'details_uri': {'key': 'detailsUri', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'details': {'key': 'details', 'type': '[RootError]'},
        'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
        'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'},
    }

    def __init__(
        self,
        *,
        code: Optional[str] = None,
        severity: Optional[int] = None,
        message: Optional[str] = None,
        message_format: Optional[str] = None,
        message_parameters: Optional[Dict[str, str]] = None,
        reference_code: Optional[str] = None,
        details_uri: Optional[str] = None,
        target: Optional[str] = None,
        details: Optional[List["RootError"]] = None,
        inner_error: Optional["InnerErrorResponse"] = None,
        additional_info: Optional[List["ErrorAdditionalInfo"]] = None,
        **kwargs
    ):
        """
        :keyword code: The service-defined error code. Supported error codes: ServiceError,
         UserError, ValidationError, AzureStorageError, TransientError, RequestThrottled.
        :paramtype code: str
        :keyword severity: The Severity of error.
        :paramtype severity: int
        :keyword message: A human-readable representation of the error.
        :paramtype message: str
        :keyword message_format: An unformatted version of the message with no variable
         substitution.
        :paramtype message_format: str
        :keyword message_parameters: Value substitutions corresponding to the contents of
         MessageFormat.
        :paramtype message_parameters: dict[str, str]
        :keyword reference_code: This code can optionally be set by the system generating the
         error.
         It should be used to classify the problem and identify the module and code area where the
         failure occurred.
        :paramtype reference_code: str
        :keyword details_uri: A URI which points to more details about the context of the error.
        :paramtype details_uri: str
        :keyword target: The target of the error (e.g., the name of the property in error).
        :paramtype target: str
        :keyword details: The related errors that occurred during the request.
        :paramtype details: list[~flow.models.RootError]
        :keyword inner_error: A nested structure of errors.
        :paramtype inner_error: ~flow.models.InnerErrorResponse
        :keyword additional_info: The error additional info.
        :paramtype additional_info: list[~flow.models.ErrorAdditionalInfo]
        """
        super(RootError, self).__init__(**kwargs)
        self.code = code
        self.severity = severity
        self.message = message
        self.message_format = message_format
        self.message_parameters = message_parameters
        self.reference_code = reference_code
        self.details_uri = details_uri
        self.target = target
        self.details = details
        self.inner_error = inner_error
        self.additional_info = additional_info


class RSection(msrest.serialization.Model):
    """RSection.
    :ivar r_version:
    :vartype r_version: str
    :ivar user_managed:
    :vartype user_managed: bool
    :ivar rscript_path:
    :vartype rscript_path: str
    :ivar snapshot_date:
    :vartype snapshot_date: str
    :ivar cran_packages:
    :vartype cran_packages: list[~flow.models.RCranPackage]
    :ivar git_hub_packages:
    :vartype git_hub_packages: list[~flow.models.RGitHubPackage]
    :ivar custom_url_packages:
    :vartype custom_url_packages: list[str]
    :ivar bio_conductor_packages:
    :vartype bio_conductor_packages: list[str]
    """

    # msrest wire mapping: python attribute name -> REST JSON key and serialized type.
    _attribute_map = {
        'r_version': {'key': 'rVersion', 'type': 'str'},
        'user_managed': {'key': 'userManaged', 'type': 'bool'},
        'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
        'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
        'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
        'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackage]'},
        'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
        'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        r_version: Optional[str] = None,
        user_managed: Optional[bool] = None,
        rscript_path: Optional[str] = None,
        snapshot_date: Optional[str] = None,
        cran_packages: Optional[List["RCranPackage"]] = None,
        git_hub_packages: Optional[List["RGitHubPackage"]] = None,
        custom_url_packages: Optional[List[str]] = None,
        bio_conductor_packages: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword r_version:
        :paramtype r_version: str
        :keyword user_managed:
        :paramtype user_managed: bool
        :keyword rscript_path:
        :paramtype rscript_path: str
        :keyword snapshot_date:
        :paramtype snapshot_date: str
        :keyword cran_packages:
        :paramtype cran_packages: list[~flow.models.RCranPackage]
        :keyword git_hub_packages:
        :paramtype git_hub_packages: list[~flow.models.RGitHubPackage]
        :keyword custom_url_packages:
        :paramtype custom_url_packages: list[str]
        :keyword bio_conductor_packages:
        :paramtype bio_conductor_packages: list[str]
        """
        super(RSection, self).__init__(**kwargs)
        self.r_version = r_version
        self.user_managed = user_managed
        self.rscript_path = rscript_path
        self.snapshot_date = snapshot_date
        self.cran_packages = cran_packages
        self.git_hub_packages = git_hub_packages
        self.custom_url_packages = custom_url_packages
        self.bio_conductor_packages = bio_conductor_packages


class RunAnnotations(msrest.serialization.Model):
    """RunAnnotations.

    :ivar display_name:
    :vartype display_name: str
    :ivar status:
    :vartype status: str
    :ivar primary_metric_name:
    :vartype primary_metric_name: str
    :ivar estimated_cost:
    :vartype estimated_cost: float
    :ivar primary_metric_summary:
    :vartype primary_metric_summary: ~flow.models.RunIndexMetricSummary
    :ivar metrics: Dictionary of :code:`<RunIndexMetricSummarySystemObject>`.
    :vartype metrics: dict[str, ~flow.models.RunIndexMetricSummarySystemObject]
    :ivar parameters: Dictionary of :code:`<any>`.
    :vartype parameters: dict[str, any]
    :ivar settings: Dictionary of :code:`<string>`.
    :vartype settings: dict[str, str]
    :ivar modified_time:
    :vartype modified_time: ~datetime.datetime
    :ivar retain_for_lifetime_of_workspace:
    :vartype retain_for_lifetime_of_workspace: bool
    :ivar error:
    :vartype error: ~flow.models.IndexedErrorResponse
    :ivar resource_metric_summary:
    :vartype resource_metric_summary: ~flow.models.RunIndexResourceMetricSummary
    :ivar job_cost:
    :vartype job_cost: ~flow.models.JobCost
    :ivar compute_duration:
    :vartype compute_duration: str
    :ivar compute_duration_milliseconds:
    :vartype compute_duration_milliseconds: float
    :ivar effective_start_time_utc:
    :vartype effective_start_time_utc: ~datetime.datetime
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar archived:
    :vartype archived: bool
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    """

    # msrest wire mapping; 'iso-8601' entries (de)serialize to timezone-aware datetimes.
    _attribute_map = {
        'display_name': {'key': 'displayName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
        'estimated_cost': {'key': 'estimatedCost', 'type': 'float'},
        'primary_metric_summary': {'key': 'primaryMetricSummary', 'type': 'RunIndexMetricSummary'},
        'metrics': {'key': 'metrics', 'type': '{RunIndexMetricSummarySystemObject}'},
        'parameters': {'key': 'parameters', 'type': '{object}'},
        'settings': {'key': 'settings', 'type': '{str}'},
        'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
        'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
        'error': {'key': 'error', 'type': 'IndexedErrorResponse'},
        'resource_metric_summary': {'key': 'resourceMetricSummary', 'type': 'RunIndexResourceMetricSummary'},
        'job_cost': {'key': 'jobCost', 'type': 'JobCost'},
        'compute_duration': {'key': 'computeDuration', 'type': 'str'},
        'compute_duration_milliseconds': {'key': 'computeDurationMilliseconds', 'type': 'float'},
        'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'archived': {'key': 'archived', 'type': 'bool'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        display_name: Optional[str] = None,
        status: Optional[str] = None,
        primary_metric_name: Optional[str] = None,
        estimated_cost: Optional[float] = None,
        primary_metric_summary: Optional["RunIndexMetricSummary"] = None,
        metrics: Optional[Dict[str, "RunIndexMetricSummarySystemObject"]] = None,
        parameters: Optional[Dict[str, Any]] = None,
        settings: Optional[Dict[str, str]] = None,
        modified_time: Optional[datetime.datetime] = None,
        retain_for_lifetime_of_workspace: Optional[bool] = None,
        error: Optional["IndexedErrorResponse"] = None,
        resource_metric_summary: Optional["RunIndexResourceMetricSummary"] = None,
        job_cost: Optional["JobCost"] = None,
        compute_duration: Optional[str] = None,
        compute_duration_milliseconds: Optional[float] = None,
        effective_start_time_utc: Optional[datetime.datetime] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        archived: Optional[bool] = None,
        tags: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword display_name:
        :paramtype display_name: str
        :keyword status:
        :paramtype status: str
        :keyword primary_metric_name:
        :paramtype primary_metric_name: str
        :keyword estimated_cost:
        :paramtype estimated_cost: float
        :keyword primary_metric_summary:
        :paramtype primary_metric_summary: ~flow.models.RunIndexMetricSummary
        :keyword metrics: Dictionary of :code:`<RunIndexMetricSummarySystemObject>`.
        :paramtype metrics: dict[str, ~flow.models.RunIndexMetricSummarySystemObject]
        :keyword parameters: Dictionary of :code:`<any>`.
        :paramtype parameters: dict[str, any]
        :keyword settings: Dictionary of :code:`<string>`.
        :paramtype settings: dict[str, str]
        :keyword modified_time:
        :paramtype modified_time: ~datetime.datetime
        :keyword retain_for_lifetime_of_workspace:
        :paramtype retain_for_lifetime_of_workspace: bool
        :keyword error:
        :paramtype error: ~flow.models.IndexedErrorResponse
        :keyword resource_metric_summary:
        :paramtype resource_metric_summary: ~flow.models.RunIndexResourceMetricSummary
        :keyword job_cost:
        :paramtype job_cost: ~flow.models.JobCost
        :keyword compute_duration:
        :paramtype compute_duration: str
        :keyword compute_duration_milliseconds:
        :paramtype compute_duration_milliseconds: float
        :keyword effective_start_time_utc:
        :paramtype effective_start_time_utc: ~datetime.datetime
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword archived:
        :paramtype archived: bool
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        """
        super(RunAnnotations, self).__init__(**kwargs)
        self.display_name = display_name
        self.status = status
        self.primary_metric_name = primary_metric_name
        self.estimated_cost = estimated_cost
        self.primary_metric_summary = primary_metric_summary
        self.metrics = metrics
        self.parameters = parameters
        self.settings = settings
        self.modified_time = modified_time
        self.retain_for_lifetime_of_workspace = retain_for_lifetime_of_workspace
        self.error = error
        self.resource_metric_summary = resource_metric_summary
        self.job_cost = job_cost
        self.compute_duration = compute_duration
        self.compute_duration_milliseconds = compute_duration_milliseconds
        self.effective_start_time_utc = effective_start_time_utc
        self.name = name
        self.description = description
        self.archived = archived
        self.tags = tags


class RunConfiguration(msrest.serialization.Model):
    """RunConfiguration.

    :ivar script:
    :vartype script: str
    :ivar script_type: Possible values include: "Python", "Notebook".
    :vartype script_type: str or ~flow.models.ScriptType
    :ivar command:
    :vartype command: str
    :ivar use_absolute_path:
    :vartype use_absolute_path: bool
    :ivar arguments:
    :vartype arguments: list[str]
    :ivar framework: Possible values include: "Python", "PySpark", "Cntk", "TensorFlow",
     "PyTorch", "PySparkInteractive", "R".
    :vartype framework: str or ~flow.models.Framework
    :ivar communicator: Possible values include: "None", "ParameterServer", "Gloo", "Mpi", "Nccl",
     "ParallelTask".
    :vartype communicator: str or ~flow.models.Communicator
    :ivar target:
    :vartype target: str
    :ivar auto_cluster_compute_specification:
    :vartype auto_cluster_compute_specification: ~flow.models.AutoClusterComputeSpecification
    :ivar data_references: Dictionary of :code:`<DataReferenceConfiguration>`.
    :vartype data_references: dict[str, ~flow.models.DataReferenceConfiguration]
    :ivar data: Dictionary of :code:`<Data>`.
    :vartype data: dict[str, ~flow.models.Data]
    :ivar input_assets: Dictionary of :code:`<InputAsset>`.
    :vartype input_assets: dict[str, ~flow.models.InputAsset]
    :ivar output_data: Dictionary of :code:`<OutputData>`.
    :vartype output_data: dict[str, ~flow.models.OutputData]
    :ivar datacaches:
    :vartype datacaches: list[~flow.models.DatacacheConfiguration]
    :ivar job_name:
    :vartype job_name: str
    :ivar max_run_duration_seconds:
    :vartype max_run_duration_seconds: long
    :ivar node_count:
    :vartype node_count: int
    :ivar max_node_count:
    :vartype max_node_count: int
    :ivar instance_types:
    :vartype instance_types: list[str]
    :ivar priority:
    :vartype priority: int
    :ivar credential_passthrough:
    :vartype credential_passthrough: bool
    :ivar identity:
    :vartype identity: ~flow.models.IdentityConfiguration
    :ivar environment:
    :vartype environment: ~flow.models.EnvironmentDefinition
    :ivar history:
    :vartype history: ~flow.models.HistoryConfiguration
    :ivar spark:
    :vartype spark: ~flow.models.SparkConfiguration
    :ivar parallel_task:
    :vartype parallel_task: ~flow.models.ParallelTaskConfiguration
    :ivar tensorflow:
    :vartype tensorflow: ~flow.models.TensorflowConfiguration
    :ivar mpi:
    :vartype mpi: ~flow.models.MpiConfiguration
    :ivar py_torch:
    :vartype py_torch: ~flow.models.PyTorchConfiguration
    :ivar ray:
    :vartype ray: ~flow.models.RayConfiguration
    :ivar hdi:
    :vartype hdi: ~flow.models.HdiConfiguration
    :ivar docker:
    :vartype docker: ~flow.models.DockerConfiguration
    :ivar command_return_code_config:
    :vartype command_return_code_config: ~flow.models.CommandReturnCodeConfig
    :ivar environment_variables: Dictionary of :code:`<string>`.
    :vartype environment_variables: dict[str, str]
    :ivar application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
    :vartype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
    :ivar parameters:
    :vartype parameters: list[~flow.models.ParameterDefinition]
    :ivar autologger_settings:
    :vartype autologger_settings: ~flow.models.AutologgerSettings
    :ivar data_bricks:
    :vartype data_bricks: ~flow.models.DatabricksConfiguration
    :ivar training_diagnostic_config:
    :vartype training_diagnostic_config: ~flow.models.TrainingDiagnosticConfiguration
    :ivar secrets_configuration: Dictionary of :code:`<SecretConfiguration>`.
    :vartype secrets_configuration: dict[str, ~flow.models.SecretConfiguration]
    """

    # msrest wire mapping; 'long' serializes a python int as a 64-bit JSON integer.
    _attribute_map = {
        'script': {'key': 'script', 'type': 'str'},
        'script_type': {'key': 'scriptType', 'type': 'str'},
        'command': {'key': 'command', 'type': 'str'},
        'use_absolute_path': {'key': 'useAbsolutePath', 'type': 'bool'},
        'arguments': {'key': 'arguments', 'type': '[str]'},
        'framework': {'key': 'framework', 'type': 'str'},
        'communicator': {'key': 'communicator', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'auto_cluster_compute_specification': {'key': 'autoClusterComputeSpecification', 'type': 'AutoClusterComputeSpecification'},
        'data_references': {'key': 'dataReferences', 'type': '{DataReferenceConfiguration}'},
        'data': {'key': 'data', 'type': '{Data}'},
        'input_assets': {'key': 'inputAssets', 'type': '{InputAsset}'},
        'output_data': {'key': 'outputData', 'type': '{OutputData}'},
        'datacaches': {'key': 'datacaches', 'type': '[DatacacheConfiguration]'},
        'job_name': {'key': 'jobName', 'type': 'str'},
        'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'long'},
        'node_count': {'key': 'nodeCount', 'type': 'int'},
        'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
        'instance_types': {'key': 'instanceTypes', 'type': '[str]'},
        'priority': {'key': 'priority', 'type': 'int'},
        'credential_passthrough': {'key': 'credentialPassthrough', 'type': 'bool'},
        'identity': {'key': 'identity', 'type': 'IdentityConfiguration'},
        'environment': {'key': 'environment', 'type': 'EnvironmentDefinition'},
        'history': {'key': 'history', 'type': 'HistoryConfiguration'},
        'spark': {'key': 'spark', 'type': 'SparkConfiguration'},
        'parallel_task': {'key': 'parallelTask', 'type': 'ParallelTaskConfiguration'},
        'tensorflow': {'key': 'tensorflow', 'type': 'TensorflowConfiguration'},
        'mpi': {'key': 'mpi', 'type': 'MpiConfiguration'},
        'py_torch': {'key': 'pyTorch', 'type': 'PyTorchConfiguration'},
        'ray': {'key': 'ray', 'type': 'RayConfiguration'},
        'hdi': {'key': 'hdi', 'type': 'HdiConfiguration'},
        'docker': {'key': 'docker', 'type': 'DockerConfiguration'},
        'command_return_code_config': {'key': 'commandReturnCodeConfig', 'type': 'CommandReturnCodeConfig'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'application_endpoints': {'key': 'applicationEndpoints', 'type': '{ApplicationEndpointConfiguration}'},
        'parameters': {'key': 'parameters', 'type': '[ParameterDefinition]'},
        'autologger_settings': {'key': 'autologgerSettings', 'type': 'AutologgerSettings'},
        'data_bricks': {'key': 'dataBricks', 'type': 'DatabricksConfiguration'},
        'training_diagnostic_config': {'key': 'trainingDiagnosticConfig', 'type': 'TrainingDiagnosticConfiguration'},
        'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'},
    }

    def __init__(
        self,
        *,
        script: Optional[str] = None,
        script_type: Optional[Union[str, "ScriptType"]] = None,
        command: Optional[str] = None,
        use_absolute_path: Optional[bool] = None,
        arguments: Optional[List[str]] = None,
        framework: Optional[Union[str, "Framework"]] = None,
        communicator: Optional[Union[str, "Communicator"]] = None,
        target: Optional[str] = None,
        auto_cluster_compute_specification: Optional["AutoClusterComputeSpecification"] = None,
        data_references: Optional[Dict[str, "DataReferenceConfiguration"]] = None,
        data: Optional[Dict[str, "Data"]] = None,
        input_assets: Optional[Dict[str, "InputAsset"]] = None,
        output_data: Optional[Dict[str, "OutputData"]] = None,
        datacaches: Optional[List["DatacacheConfiguration"]] = None,
        job_name: Optional[str] = None,
        max_run_duration_seconds: Optional[int] = None,
        node_count: Optional[int] = None,
        max_node_count: Optional[int] = None,
        instance_types: Optional[List[str]] = None,
        priority: Optional[int] = None,
        credential_passthrough: Optional[bool] = None,
        identity: Optional["IdentityConfiguration"] = None,
        environment: Optional["EnvironmentDefinition"] = None,
        history: Optional["HistoryConfiguration"] = None,
        spark: Optional["SparkConfiguration"] = None,
        parallel_task: Optional["ParallelTaskConfiguration"] = None,
        tensorflow: Optional["TensorflowConfiguration"] = None,
        mpi: Optional["MpiConfiguration"] = None,
        py_torch: Optional["PyTorchConfiguration"] = None,
        ray: Optional["RayConfiguration"] = None,
        hdi: Optional["HdiConfiguration"] = None,
        docker: Optional["DockerConfiguration"] = None,
        command_return_code_config: Optional["CommandReturnCodeConfig"] = None,
        environment_variables: Optional[Dict[str, str]] = None,
        application_endpoints: Optional[Dict[str, "ApplicationEndpointConfiguration"]] = None,
        parameters: Optional[List["ParameterDefinition"]] = None,
        autologger_settings: Optional["AutologgerSettings"] = None,
        data_bricks: Optional["DatabricksConfiguration"] = None,
        training_diagnostic_config: Optional["TrainingDiagnosticConfiguration"] = None,
        secrets_configuration: Optional[Dict[str, "SecretConfiguration"]] = None,
        **kwargs
    ):
        """
        :keyword script:
        :paramtype script: str
        :keyword script_type: Possible values include: "Python", "Notebook".
        :paramtype script_type: str or ~flow.models.ScriptType
        :keyword command:
        :paramtype command: str
        :keyword use_absolute_path:
        :paramtype use_absolute_path: bool
        :keyword arguments:
        :paramtype arguments: list[str]
        :keyword framework: Possible values include: "Python", "PySpark", "Cntk", "TensorFlow",
         "PyTorch", "PySparkInteractive", "R".
        :paramtype framework: str or ~flow.models.Framework
        :keyword communicator: Possible values include: "None", "ParameterServer", "Gloo", "Mpi",
         "Nccl", "ParallelTask".
        :paramtype communicator: str or ~flow.models.Communicator
        :keyword target:
        :paramtype target: str
        :keyword auto_cluster_compute_specification:
        :paramtype auto_cluster_compute_specification: ~flow.models.AutoClusterComputeSpecification
        :keyword data_references: Dictionary of :code:`<DataReferenceConfiguration>`.
        :paramtype data_references: dict[str, ~flow.models.DataReferenceConfiguration]
        :keyword data: Dictionary of :code:`<Data>`.
        :paramtype data: dict[str, ~flow.models.Data]
        :keyword input_assets: Dictionary of :code:`<InputAsset>`.
        :paramtype input_assets: dict[str, ~flow.models.InputAsset]
        :keyword output_data: Dictionary of :code:`<OutputData>`.
        :paramtype output_data: dict[str, ~flow.models.OutputData]
        :keyword datacaches:
        :paramtype datacaches: list[~flow.models.DatacacheConfiguration]
        :keyword job_name:
        :paramtype job_name: str
        :keyword max_run_duration_seconds:
        :paramtype max_run_duration_seconds: long
        :keyword node_count:
        :paramtype node_count: int
        :keyword max_node_count:
        :paramtype max_node_count: int
        :keyword instance_types:
        :paramtype instance_types: list[str]
        :keyword priority:
        :paramtype priority: int
        :keyword credential_passthrough:
        :paramtype credential_passthrough: bool
        :keyword identity:
        :paramtype identity: ~flow.models.IdentityConfiguration
        :keyword environment:
        :paramtype environment: ~flow.models.EnvironmentDefinition
        :keyword history:
        :paramtype history: ~flow.models.HistoryConfiguration
        :keyword spark:
        :paramtype spark: ~flow.models.SparkConfiguration
        :keyword parallel_task:
        :paramtype parallel_task: ~flow.models.ParallelTaskConfiguration
        :keyword tensorflow:
        :paramtype tensorflow: ~flow.models.TensorflowConfiguration
        :keyword mpi:
        :paramtype mpi: ~flow.models.MpiConfiguration
        :keyword py_torch:
        :paramtype py_torch: ~flow.models.PyTorchConfiguration
        :keyword ray:
        :paramtype ray: ~flow.models.RayConfiguration
        :keyword hdi:
        :paramtype hdi: ~flow.models.HdiConfiguration
        :keyword docker:
        :paramtype docker: ~flow.models.DockerConfiguration
        :keyword command_return_code_config:
        :paramtype command_return_code_config: ~flow.models.CommandReturnCodeConfig
        :keyword environment_variables: Dictionary of :code:`<string>`.
        :paramtype environment_variables: dict[str, str]
        :keyword application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
        :paramtype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
        :keyword parameters:
        :paramtype parameters: list[~flow.models.ParameterDefinition]
        :keyword autologger_settings:
        :paramtype autologger_settings: ~flow.models.AutologgerSettings
        :keyword data_bricks:
        :paramtype data_bricks: ~flow.models.DatabricksConfiguration
        :keyword training_diagnostic_config:
        :paramtype training_diagnostic_config: ~flow.models.TrainingDiagnosticConfiguration
        :keyword secrets_configuration: Dictionary of :code:`<SecretConfiguration>`.
        :paramtype secrets_configuration: dict[str, ~flow.models.SecretConfiguration]
        """
        super(RunConfiguration, self).__init__(**kwargs)
        self.script = script
        self.script_type = script_type
        self.command = command
        self.use_absolute_path = use_absolute_path
        self.arguments = arguments
        self.framework = framework
        self.communicator = communicator
        self.target = target
        self.auto_cluster_compute_specification = auto_cluster_compute_specification
        self.data_references = data_references
        self.data = data
        self.input_assets = input_assets
        self.output_data = output_data
        self.datacaches = datacaches
        self.job_name = job_name
        self.max_run_duration_seconds = max_run_duration_seconds
        self.node_count = node_count
        self.max_node_count = max_node_count
        self.instance_types = instance_types
        self.priority = priority
        self.credential_passthrough = credential_passthrough
        self.identity = identity
        self.environment = environment
        self.history = history
        self.spark = spark
        self.parallel_task = parallel_task
        self.tensorflow = tensorflow
        self.mpi = mpi
        self.py_torch = py_torch
        self.ray = ray
        self.hdi = hdi
        self.docker = docker
        self.command_return_code_config = command_return_code_config
        self.environment_variables = environment_variables
        self.application_endpoints = application_endpoints
        self.parameters = parameters
        self.autologger_settings = autologger_settings
        self.data_bricks = data_bricks
        self.training_diagnostic_config = training_diagnostic_config
        self.secrets_configuration = secrets_configuration


class RunDatasetReference(msrest.serialization.Model):
    """RunDatasetReference.
    :ivar id:
    :vartype id: str
    :ivar name:
    :vartype name: str
    :ivar version:
    :vartype version: str
    """

    # msrest wire mapping; 'id' intentionally mirrors the REST field and shadows builtins.id.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,
        name: Optional[str] = None,
        version: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword name:
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        """
        super(RunDatasetReference, self).__init__(**kwargs)
        self.id = id
        self.name = name
        self.version = version


class RunDefinition(msrest.serialization.Model):
    """RunDefinition.

    :ivar configuration:
    :vartype configuration: ~flow.models.RunConfiguration
    :ivar snapshot_id:
    :vartype snapshot_id: str
    :ivar snapshots:
    :vartype snapshots: list[~flow.models.Snapshot]
    :ivar parent_run_id:
    :vartype parent_run_id: str
    :ivar run_type:
    :vartype run_type: str
    :ivar display_name:
    :vartype display_name: str
    :ivar environment_asset_id:
    :vartype environment_asset_id: str
    :ivar primary_metric_name:
    :vartype primary_metric_name: str
    :ivar description:
    :vartype description: str
    :ivar cancel_reason:
    :vartype cancel_reason: str
    :ivar properties: Dictionary of :code:`<string>`.
    :vartype properties: dict[str, str]
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    """

    # msrest wire mapping: python attribute name -> REST JSON key and serialized type.
    _attribute_map = {
        'configuration': {'key': 'configuration', 'type': 'RunConfiguration'},
        'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
        'snapshots': {'key': 'snapshots', 'type': '[Snapshot]'},
        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
        'run_type': {'key': 'runType', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
        'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'cancel_reason': {'key': 'cancelReason', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        configuration: Optional["RunConfiguration"] = None,
        snapshot_id: Optional[str] = None,
        snapshots: Optional[List["Snapshot"]] = None,
        parent_run_id: Optional[str] = None,
        run_type: Optional[str] = None,
        display_name: Optional[str] = None,
        environment_asset_id: Optional[str] = None,
        primary_metric_name: Optional[str] = None,
        description: Optional[str] = None,
        cancel_reason: Optional[str] = None,
        properties: Optional[Dict[str, str]] = None,
        tags: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword configuration:
        :paramtype configuration: ~flow.models.RunConfiguration
        :keyword snapshot_id:
        :paramtype snapshot_id: str
        :keyword snapshots:
        :paramtype snapshots: list[~flow.models.Snapshot]
        :keyword parent_run_id:
        :paramtype parent_run_id: str
        :keyword run_type:
        :paramtype run_type: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword environment_asset_id:
        :paramtype environment_asset_id: str
        :keyword primary_metric_name:
        :paramtype primary_metric_name: str
        :keyword description:
        :paramtype description: str
        :keyword cancel_reason:
        :paramtype cancel_reason: str
        :keyword properties: Dictionary of :code:`<string>`.
        :paramtype properties: dict[str, str]
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        """
        super(RunDefinition, self).__init__(**kwargs)
        self.configuration = configuration
        self.snapshot_id = snapshot_id
        self.snapshots = snapshots
        self.parent_run_id = parent_run_id
        self.run_type = run_type
        self.display_name = display_name
        self.environment_asset_id = environment_asset_id
        self.primary_metric_name = primary_metric_name
        self.description = description
        self.cancel_reason = cancel_reason
        self.properties = properties
        self.tags = tags


class RunDetailsDto(msrest.serialization.Model):
    """RunDetailsDto.

    :ivar run_id:
    :vartype run_id: str
    :ivar run_uuid:
    :vartype run_uuid: str
    :ivar parent_run_uuid:
    :vartype parent_run_uuid: str
    :ivar root_run_uuid:
    :vartype root_run_uuid: str
    :ivar target:
    :vartype target: str
    :ivar status:
    :vartype status: str
    :ivar parent_run_id:
    :vartype parent_run_id: str
    :ivar data_container_id:
    :vartype data_container_id: str
    :ivar created_time_utc:
    :vartype created_time_utc: ~datetime.datetime
    :ivar start_time_utc:
    :vartype start_time_utc: ~datetime.datetime
    :ivar end_time_utc:
    :vartype end_time_utc: ~datetime.datetime
    :ivar error: The error response.
    :vartype error: ~flow.models.ErrorResponse
    :ivar warnings:
    :vartype warnings: list[~flow.models.RunDetailsWarningDto]
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar parameters: Dictionary of :code:`<any>`.
    :vartype parameters: dict[str, any]
    :ivar services: This is a dictionary.
    :vartype services: dict[str, ~flow.models.EndpointSetting]
    :ivar input_datasets:
    :vartype input_datasets: list[~flow.models.DatasetLineage]
    :ivar output_datasets:
    :vartype output_datasets: list[~flow.models.OutputDatasetLineage]
    :ivar run_definition: Anything.
    :vartype run_definition: any
    :ivar log_files: This is a dictionary.
:vartype log_files: dict[str, str] :ivar job_cost: :vartype job_cost: ~flow.models.JobCost :ivar revision: :vartype revision: long :ivar run_type_v2: :vartype run_type_v2: ~flow.models.RunTypeV2 :ivar settings: This is a dictionary. :vartype settings: dict[str, str] :ivar compute_request: :vartype compute_request: ~flow.models.ComputeRequest :ivar compute: :vartype compute: ~flow.models.Compute :ivar created_by: :vartype created_by: ~flow.models.User :ivar compute_duration: :vartype compute_duration: str :ivar effective_start_time_utc: :vartype effective_start_time_utc: ~datetime.datetime :ivar run_number: :vartype run_number: int :ivar root_run_id: :vartype root_run_id: str :ivar experiment_id: :vartype experiment_id: str :ivar user_id: :vartype user_id: str :ivar status_revision: :vartype status_revision: long :ivar current_compute_time: :vartype current_compute_time: str :ivar last_start_time_utc: :vartype last_start_time_utc: ~datetime.datetime :ivar last_modified_by: :vartype last_modified_by: ~flow.models.User :ivar last_modified_utc: :vartype last_modified_utc: ~datetime.datetime :ivar duration: :vartype duration: str :ivar inputs: Dictionary of :code:`<TypedAssetReference>`. :vartype inputs: dict[str, ~flow.models.TypedAssetReference] :ivar outputs: Dictionary of :code:`<TypedAssetReference>`. 
:vartype outputs: dict[str, ~flow.models.TypedAssetReference] :ivar current_attempt_id: :vartype current_attempt_id: int """ _validation = { 'input_datasets': {'unique': True}, 'output_datasets': {'unique': True}, } _attribute_map = { 'run_id': {'key': 'runId', 'type': 'str'}, 'run_uuid': {'key': 'runUuid', 'type': 'str'}, 'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'}, 'root_run_uuid': {'key': 'rootRunUuid', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'parent_run_id': {'key': 'parentRunId', 'type': 'str'}, 'data_container_id': {'key': 'dataContainerId', 'type': 'str'}, 'created_time_utc': {'key': 'createdTimeUtc', 'type': 'iso-8601'}, 'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'}, 'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'}, 'error': {'key': 'error', 'type': 'ErrorResponse'}, 'warnings': {'key': 'warnings', 'type': '[RunDetailsWarningDto]'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'parameters': {'key': 'parameters', 'type': '{object}'}, 'services': {'key': 'services', 'type': '{EndpointSetting}'}, 'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'}, 'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'}, 'run_definition': {'key': 'runDefinition', 'type': 'object'}, 'log_files': {'key': 'logFiles', 'type': '{str}'}, 'job_cost': {'key': 'jobCost', 'type': 'JobCost'}, 'revision': {'key': 'revision', 'type': 'long'}, 'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'}, 'settings': {'key': 'settings', 'type': '{str}'}, 'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'}, 'compute': {'key': 'compute', 'type': 'Compute'}, 'created_by': {'key': 'createdBy', 'type': 'User'}, 'compute_duration': {'key': 'computeDuration', 'type': 'str'}, 'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'}, 'run_number': {'key': 'runNumber', 
'type': 'int'}, 'root_run_id': {'key': 'rootRunId', 'type': 'str'}, 'experiment_id': {'key': 'experimentId', 'type': 'str'}, 'user_id': {'key': 'userId', 'type': 'str'}, 'status_revision': {'key': 'statusRevision', 'type': 'long'}, 'current_compute_time': {'key': 'currentComputeTime', 'type': 'str'}, 'last_start_time_utc': {'key': 'lastStartTimeUtc', 'type': 'iso-8601'}, 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'User'}, 'last_modified_utc': {'key': 'lastModifiedUtc', 'type': 'iso-8601'}, 'duration': {'key': 'duration', 'type': 'str'}, 'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'}, 'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'}, 'current_attempt_id': {'key': 'currentAttemptId', 'type': 'int'}, } def __init__( self, *, run_id: Optional[str] = None, run_uuid: Optional[str] = None, parent_run_uuid: Optional[str] = None, root_run_uuid: Optional[str] = None, target: Optional[str] = None, status: Optional[str] = None, parent_run_id: Optional[str] = None, data_container_id: Optional[str] = None, created_time_utc: Optional[datetime.datetime] = None, start_time_utc: Optional[datetime.datetime] = None, end_time_utc: Optional[datetime.datetime] = None, error: Optional["ErrorResponse"] = None, warnings: Optional[List["RunDetailsWarningDto"]] = None, tags: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, parameters: Optional[Dict[str, Any]] = None, services: Optional[Dict[str, "EndpointSetting"]] = None, input_datasets: Optional[List["DatasetLineage"]] = None, output_datasets: Optional[List["OutputDatasetLineage"]] = None, run_definition: Optional[Any] = None, log_files: Optional[Dict[str, str]] = None, job_cost: Optional["JobCost"] = None, revision: Optional[int] = None, run_type_v2: Optional["RunTypeV2"] = None, settings: Optional[Dict[str, str]] = None, compute_request: Optional["ComputeRequest"] = None, compute: Optional["Compute"] = None, created_by: Optional["User"] = None, compute_duration: 
Optional[str] = None, effective_start_time_utc: Optional[datetime.datetime] = None, run_number: Optional[int] = None, root_run_id: Optional[str] = None, experiment_id: Optional[str] = None, user_id: Optional[str] = None, status_revision: Optional[int] = None, current_compute_time: Optional[str] = None, last_start_time_utc: Optional[datetime.datetime] = None, last_modified_by: Optional["User"] = None, last_modified_utc: Optional[datetime.datetime] = None, duration: Optional[str] = None, inputs: Optional[Dict[str, "TypedAssetReference"]] = None, outputs: Optional[Dict[str, "TypedAssetReference"]] = None, current_attempt_id: Optional[int] = None, **kwargs ): """ :keyword run_id: :paramtype run_id: str :keyword run_uuid: :paramtype run_uuid: str :keyword parent_run_uuid: :paramtype parent_run_uuid: str :keyword root_run_uuid: :paramtype root_run_uuid: str :keyword target: :paramtype target: str :keyword status: :paramtype status: str :keyword parent_run_id: :paramtype parent_run_id: str :keyword data_container_id: :paramtype data_container_id: str :keyword created_time_utc: :paramtype created_time_utc: ~datetime.datetime :keyword start_time_utc: :paramtype start_time_utc: ~datetime.datetime :keyword end_time_utc: :paramtype end_time_utc: ~datetime.datetime :keyword error: The error response. :paramtype error: ~flow.models.ErrorResponse :keyword warnings: :paramtype warnings: list[~flow.models.RunDetailsWarningDto] :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] :keyword parameters: Dictionary of :code:`<any>`. :paramtype parameters: dict[str, any] :keyword services: This is a dictionary. 
:paramtype services: dict[str, ~flow.models.EndpointSetting] :keyword input_datasets: :paramtype input_datasets: list[~flow.models.DatasetLineage] :keyword output_datasets: :paramtype output_datasets: list[~flow.models.OutputDatasetLineage] :keyword run_definition: Anything. :paramtype run_definition: any :keyword log_files: This is a dictionary. :paramtype log_files: dict[str, str] :keyword job_cost: :paramtype job_cost: ~flow.models.JobCost :keyword revision: :paramtype revision: long :keyword run_type_v2: :paramtype run_type_v2: ~flow.models.RunTypeV2 :keyword settings: This is a dictionary. :paramtype settings: dict[str, str] :keyword compute_request: :paramtype compute_request: ~flow.models.ComputeRequest :keyword compute: :paramtype compute: ~flow.models.Compute :keyword created_by: :paramtype created_by: ~flow.models.User :keyword compute_duration: :paramtype compute_duration: str :keyword effective_start_time_utc: :paramtype effective_start_time_utc: ~datetime.datetime :keyword run_number: :paramtype run_number: int :keyword root_run_id: :paramtype root_run_id: str :keyword experiment_id: :paramtype experiment_id: str :keyword user_id: :paramtype user_id: str :keyword status_revision: :paramtype status_revision: long :keyword current_compute_time: :paramtype current_compute_time: str :keyword last_start_time_utc: :paramtype last_start_time_utc: ~datetime.datetime :keyword last_modified_by: :paramtype last_modified_by: ~flow.models.User :keyword last_modified_utc: :paramtype last_modified_utc: ~datetime.datetime :keyword duration: :paramtype duration: str :keyword inputs: Dictionary of :code:`<TypedAssetReference>`. :paramtype inputs: dict[str, ~flow.models.TypedAssetReference] :keyword outputs: Dictionary of :code:`<TypedAssetReference>`. 
:paramtype outputs: dict[str, ~flow.models.TypedAssetReference] :keyword current_attempt_id: :paramtype current_attempt_id: int """ super(RunDetailsDto, self).__init__(**kwargs) self.run_id = run_id self.run_uuid = run_uuid self.parent_run_uuid = parent_run_uuid self.root_run_uuid = root_run_uuid self.target = target self.status = status self.parent_run_id = parent_run_id self.data_container_id = data_container_id self.created_time_utc = created_time_utc self.start_time_utc = start_time_utc self.end_time_utc = end_time_utc self.error = error self.warnings = warnings self.tags = tags self.properties = properties self.parameters = parameters self.services = services self.input_datasets = input_datasets self.output_datasets = output_datasets self.run_definition = run_definition self.log_files = log_files self.job_cost = job_cost self.revision = revision self.run_type_v2 = run_type_v2 self.settings = settings self.compute_request = compute_request self.compute = compute self.created_by = created_by self.compute_duration = compute_duration self.effective_start_time_utc = effective_start_time_utc self.run_number = run_number self.root_run_id = root_run_id self.experiment_id = experiment_id self.user_id = user_id self.status_revision = status_revision self.current_compute_time = current_compute_time self.last_start_time_utc = last_start_time_utc self.last_modified_by = last_modified_by self.last_modified_utc = last_modified_utc self.duration = duration self.inputs = inputs self.outputs = outputs self.current_attempt_id = current_attempt_id class RunDetailsWarningDto(msrest.serialization.Model): """RunDetailsWarningDto. 
    :ivar source:
    :vartype source: str
    :ivar message:
    :vartype message: str
    """

    # Maps Python attribute names to REST wire names and msrest serializer types.
    _attribute_map = {
        'source': {'key': 'source', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        source: Optional[str] = None,
        message: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword source:
        :paramtype source: str
        :keyword message:
        :paramtype message: str
        """
        super(RunDetailsWarningDto, self).__init__(**kwargs)
        self.source = source
        self.message = message


class RunDto(msrest.serialization.Model):
    """RunDto.

    :ivar run_number:
    :vartype run_number: int
    :ivar root_run_id:
    :vartype root_run_id: str
    :ivar created_utc:
    :vartype created_utc: ~datetime.datetime
    :ivar created_by:
    :vartype created_by: ~flow.models.User
    :ivar user_id:
    :vartype user_id: str
    :ivar token:
    :vartype token: str
    :ivar token_expiry_time_utc:
    :vartype token_expiry_time_utc: ~datetime.datetime
    :ivar error: The error response.
    :vartype error: ~flow.models.ErrorResponse
    :ivar warnings:
    :vartype warnings: list[~flow.models.RunDetailsWarningDto]
    :ivar revision:
    :vartype revision: long
    :ivar status_revision:
    :vartype status_revision: long
    :ivar run_uuid:
    :vartype run_uuid: str
    :ivar parent_run_uuid:
    :vartype parent_run_uuid: str
    :ivar root_run_uuid:
    :vartype root_run_uuid: str
    :ivar last_start_time_utc:
    :vartype last_start_time_utc: ~datetime.datetime
    :ivar current_compute_time:
    :vartype current_compute_time: str
    :ivar compute_duration:
    :vartype compute_duration: str
    :ivar effective_start_time_utc:
    :vartype effective_start_time_utc: ~datetime.datetime
    :ivar last_modified_by:
    :vartype last_modified_by: ~flow.models.User
    :ivar last_modified_utc:
    :vartype last_modified_utc: ~datetime.datetime
    :ivar duration:
    :vartype duration: str
    :ivar cancelation_reason:
    :vartype cancelation_reason: str
    :ivar current_attempt_id:
    :vartype current_attempt_id: int
    :ivar run_id:
    :vartype run_id: str
    :ivar parent_run_id:
    :vartype parent_run_id: str
    :ivar experiment_id:
    :vartype experiment_id: str
    :ivar status:
    :vartype status: str
    :ivar start_time_utc:
    :vartype start_time_utc: ~datetime.datetime
    :ivar end_time_utc:
    :vartype end_time_utc: ~datetime.datetime
    :ivar schedule_id:
    :vartype schedule_id: str
    :ivar display_name:
    :vartype display_name: str
    :ivar name:
    :vartype name: str
    :ivar data_container_id:
    :vartype data_container_id: str
    :ivar description:
    :vartype description: str
    :ivar hidden:
    :vartype hidden: bool
    :ivar run_type:
    :vartype run_type: str
    :ivar run_type_v2:
    :vartype run_type_v2: ~flow.models.RunTypeV2
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar parameters: Dictionary of :code:`<any>`.
    :vartype parameters: dict[str, any]
    :ivar action_uris: Dictionary of :code:`<string>`.
    :vartype action_uris: dict[str, str]
    :ivar script_name:
    :vartype script_name: str
    :ivar target:
    :vartype target: str
    :ivar unique_child_run_compute_targets:
    :vartype unique_child_run_compute_targets: list[str]
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar settings: Dictionary of :code:`<string>`.
    :vartype settings: dict[str, str]
    :ivar services: Dictionary of :code:`<EndpointSetting>`.
    :vartype services: dict[str, ~flow.models.EndpointSetting]
    :ivar input_datasets:
    :vartype input_datasets: list[~flow.models.DatasetLineage]
    :ivar output_datasets:
    :vartype output_datasets: list[~flow.models.OutputDatasetLineage]
    :ivar run_definition: Anything.
    :vartype run_definition: any
    :ivar job_specification: Anything.
    :vartype job_specification: any
    :ivar primary_metric_name:
    :vartype primary_metric_name: str
    :ivar created_from:
    :vartype created_from: ~flow.models.CreatedFromDto
    :ivar cancel_uri:
    :vartype cancel_uri: str
    :ivar complete_uri:
    :vartype complete_uri: str
    :ivar diagnostics_uri:
    :vartype diagnostics_uri: str
    :ivar compute_request:
    :vartype compute_request: ~flow.models.ComputeRequest
    :ivar compute:
    :vartype compute: ~flow.models.Compute
    :ivar retain_for_lifetime_of_workspace:
    :vartype retain_for_lifetime_of_workspace: bool
    :ivar queueing_info:
    :vartype queueing_info: ~flow.models.QueueingInfo
    :ivar inputs: Dictionary of :code:`<TypedAssetReference>`.
    :vartype inputs: dict[str, ~flow.models.TypedAssetReference]
    :ivar outputs: Dictionary of :code:`<TypedAssetReference>`.
    :vartype outputs: dict[str, ~flow.models.TypedAssetReference]
    """

    # msrest validation metadata: these list fields must contain unique items.
    _validation = {
        'unique_child_run_compute_targets': {'unique': True},
        'input_datasets': {'unique': True},
        'output_datasets': {'unique': True},
    }

    # Maps Python attribute names to REST wire names and msrest serializer types.
    _attribute_map = {
        'run_number': {'key': 'runNumber', 'type': 'int'},
        'root_run_id': {'key': 'rootRunId', 'type': 'str'},
        'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
        'created_by': {'key': 'createdBy', 'type': 'User'},
        'user_id': {'key': 'userId', 'type': 'str'},
        'token': {'key': 'token', 'type': 'str'},
        'token_expiry_time_utc': {'key': 'tokenExpiryTimeUtc', 'type': 'iso-8601'},
        'error': {'key': 'error', 'type': 'ErrorResponse'},
        'warnings': {'key': 'warnings', 'type': '[RunDetailsWarningDto]'},
        'revision': {'key': 'revision', 'type': 'long'},
        'status_revision': {'key': 'statusRevision', 'type': 'long'},
        'run_uuid': {'key': 'runUuid', 'type': 'str'},
        'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
        'root_run_uuid': {'key': 'rootRunUuid', 'type': 'str'},
        'last_start_time_utc': {'key': 'lastStartTimeUtc', 'type': 'iso-8601'},
        'current_compute_time': {'key': 'currentComputeTime', 'type': 'str'},
        'compute_duration': {'key': 'computeDuration', 'type': 'str'},
        'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
        'last_modified_by': {'key': 'lastModifiedBy', 'type': 'User'},
        'last_modified_utc': {'key': 'lastModifiedUtc', 'type': 'iso-8601'},
        'duration': {'key': 'duration', 'type': 'str'},
        'cancelation_reason': {'key': 'cancelationReason', 'type': 'str'},
        'current_attempt_id': {'key': 'currentAttemptId', 'type': 'int'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
        'schedule_id': {'key': 'scheduleId', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'hidden': {'key': 'hidden', 'type': 'bool'},
        'run_type': {'key': 'runType', 'type': 'str'},
        'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'parameters': {'key': 'parameters', 'type': '{object}'},
        'action_uris': {'key': 'actionUris', 'type': '{str}'},
        'script_name': {'key': 'scriptName', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'settings': {'key': 'settings', 'type': '{str}'},
        'services': {'key': 'services', 'type': '{EndpointSetting}'},
        'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
        'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
        'run_definition': {'key': 'runDefinition', 'type': 'object'},
        'job_specification': {'key': 'jobSpecification', 'type': 'object'},
        'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
        'created_from': {'key': 'createdFrom', 'type': 'CreatedFromDto'},
        'cancel_uri': {'key': 'cancelUri', 'type': 'str'},
        'complete_uri': {'key': 'completeUri', 'type': 'str'},
        'diagnostics_uri': {'key': 'diagnosticsUri', 'type': 'str'},
        'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
        'compute': {'key': 'compute', 'type': 'Compute'},
        'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
        'queueing_info': {'key': 'queueingInfo', 'type': 'QueueingInfo'},
        'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'},
        'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'},
    }

    def __init__(
        self,
        *,
        run_number: Optional[int] = None,
        root_run_id: Optional[str] = None,
        created_utc: Optional[datetime.datetime] = None,
        created_by: Optional["User"] = None,
        user_id: Optional[str] = None,
        token: Optional[str] = None,
        token_expiry_time_utc: Optional[datetime.datetime] = None,
        error: Optional["ErrorResponse"] = None,
        warnings: Optional[List["RunDetailsWarningDto"]] = None,
        revision: Optional[int] = None,
        status_revision: Optional[int] = None,
        run_uuid: Optional[str] = None,
        parent_run_uuid: Optional[str] = None,
        root_run_uuid: Optional[str] = None,
        last_start_time_utc: Optional[datetime.datetime] = None,
        current_compute_time: Optional[str] = None,
        compute_duration: Optional[str] = None,
        effective_start_time_utc: Optional[datetime.datetime] = None,
        last_modified_by: Optional["User"] = None,
        last_modified_utc: Optional[datetime.datetime] = None,
        duration: Optional[str] = None,
        cancelation_reason: Optional[str] = None,
        current_attempt_id: Optional[int] = None,
        run_id: Optional[str] = None,
        parent_run_id: Optional[str] = None,
        experiment_id: Optional[str] = None,
        status: Optional[str] = None,
        start_time_utc: Optional[datetime.datetime] = None,
        end_time_utc: Optional[datetime.datetime] = None,
        schedule_id: Optional[str] = None,
        display_name: Optional[str] = None,
        name: Optional[str] = None,
        data_container_id: Optional[str] = None,
        description: Optional[str] = None,
        hidden: Optional[bool] = None,
        run_type: Optional[str] = None,
        run_type_v2: Optional["RunTypeV2"] = None,
        properties: Optional[Dict[str, str]] = None,
        parameters: Optional[Dict[str, Any]] = None,
        action_uris: Optional[Dict[str, str]] = None,
        script_name: Optional[str] = None,
        target: Optional[str] = None,
        unique_child_run_compute_targets: Optional[List[str]] = None,
        tags: Optional[Dict[str, str]] = None,
        settings: Optional[Dict[str, str]] = None,
        services: Optional[Dict[str, "EndpointSetting"]] = None,
        input_datasets: Optional[List["DatasetLineage"]] = None,
        output_datasets: Optional[List["OutputDatasetLineage"]] = None,
        run_definition: Optional[Any] = None,
        job_specification: Optional[Any] = None,
        primary_metric_name: Optional[str] = None,
        created_from: Optional["CreatedFromDto"] = None,
        cancel_uri: Optional[str] = None,
        complete_uri: Optional[str] = None,
        diagnostics_uri: Optional[str] = None,
        compute_request: Optional["ComputeRequest"] = None,
        compute: Optional["Compute"] = None,
        retain_for_lifetime_of_workspace: Optional[bool] = None,
        queueing_info: Optional["QueueingInfo"] = None,
        inputs: Optional[Dict[str, "TypedAssetReference"]] = None,
        outputs: Optional[Dict[str, "TypedAssetReference"]] = None,
        **kwargs
    ):
        """
        :keyword run_number:
        :paramtype run_number: int
        :keyword root_run_id:
        :paramtype root_run_id: str
        :keyword created_utc:
        :paramtype created_utc: ~datetime.datetime
        :keyword created_by:
        :paramtype created_by: ~flow.models.User
        :keyword user_id:
        :paramtype user_id: str
        :keyword token:
        :paramtype token: str
        :keyword token_expiry_time_utc:
        :paramtype token_expiry_time_utc: ~datetime.datetime
        :keyword error: The error response.
        :paramtype error: ~flow.models.ErrorResponse
        :keyword warnings:
        :paramtype warnings: list[~flow.models.RunDetailsWarningDto]
        :keyword revision:
        :paramtype revision: long
        :keyword status_revision:
        :paramtype status_revision: long
        :keyword run_uuid:
        :paramtype run_uuid: str
        :keyword parent_run_uuid:
        :paramtype parent_run_uuid: str
        :keyword root_run_uuid:
        :paramtype root_run_uuid: str
        :keyword last_start_time_utc:
        :paramtype last_start_time_utc: ~datetime.datetime
        :keyword current_compute_time:
        :paramtype current_compute_time: str
        :keyword compute_duration:
        :paramtype compute_duration: str
        :keyword effective_start_time_utc:
        :paramtype effective_start_time_utc: ~datetime.datetime
        :keyword last_modified_by:
        :paramtype last_modified_by: ~flow.models.User
        :keyword last_modified_utc:
        :paramtype last_modified_utc: ~datetime.datetime
        :keyword duration:
        :paramtype duration: str
        :keyword cancelation_reason:
        :paramtype cancelation_reason: str
        :keyword current_attempt_id:
        :paramtype current_attempt_id: int
        :keyword run_id:
        :paramtype run_id: str
        :keyword parent_run_id:
        :paramtype parent_run_id: str
        :keyword experiment_id:
        :paramtype experiment_id: str
        :keyword status:
        :paramtype status: str
        :keyword start_time_utc:
        :paramtype start_time_utc: ~datetime.datetime
        :keyword end_time_utc:
        :paramtype end_time_utc: ~datetime.datetime
        :keyword schedule_id:
        :paramtype schedule_id: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword name:
        :paramtype name: str
        :keyword data_container_id:
        :paramtype data_container_id: str
        :keyword description:
        :paramtype description: str
        :keyword hidden:
        :paramtype hidden: bool
        :keyword run_type:
        :paramtype run_type: str
        :keyword run_type_v2:
        :paramtype run_type_v2: ~flow.models.RunTypeV2
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword parameters: Dictionary of :code:`<any>`.
        :paramtype parameters: dict[str, any]
        :keyword action_uris: Dictionary of :code:`<string>`.
        :paramtype action_uris: dict[str, str]
        :keyword script_name:
        :paramtype script_name: str
        :keyword target:
        :paramtype target: str
        :keyword unique_child_run_compute_targets:
        :paramtype unique_child_run_compute_targets: list[str]
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword settings: Dictionary of :code:`<string>`.
        :paramtype settings: dict[str, str]
        :keyword services: Dictionary of :code:`<EndpointSetting>`.
        :paramtype services: dict[str, ~flow.models.EndpointSetting]
        :keyword input_datasets:
        :paramtype input_datasets: list[~flow.models.DatasetLineage]
        :keyword output_datasets:
        :paramtype output_datasets: list[~flow.models.OutputDatasetLineage]
        :keyword run_definition: Anything.
        :paramtype run_definition: any
        :keyword job_specification: Anything.
        :paramtype job_specification: any
        :keyword primary_metric_name:
        :paramtype primary_metric_name: str
        :keyword created_from:
        :paramtype created_from: ~flow.models.CreatedFromDto
        :keyword cancel_uri:
        :paramtype cancel_uri: str
        :keyword complete_uri:
        :paramtype complete_uri: str
        :keyword diagnostics_uri:
        :paramtype diagnostics_uri: str
        :keyword compute_request:
        :paramtype compute_request: ~flow.models.ComputeRequest
        :keyword compute:
        :paramtype compute: ~flow.models.Compute
        :keyword retain_for_lifetime_of_workspace:
        :paramtype retain_for_lifetime_of_workspace: bool
        :keyword queueing_info:
        :paramtype queueing_info: ~flow.models.QueueingInfo
        :keyword inputs: Dictionary of :code:`<TypedAssetReference>`.
        :paramtype inputs: dict[str, ~flow.models.TypedAssetReference]
        :keyword outputs: Dictionary of :code:`<TypedAssetReference>`.
        :paramtype outputs: dict[str, ~flow.models.TypedAssetReference]
        """
        super(RunDto, self).__init__(**kwargs)
        self.run_number = run_number
        self.root_run_id = root_run_id
        self.created_utc = created_utc
        self.created_by = created_by
        self.user_id = user_id
        self.token = token
        self.token_expiry_time_utc = token_expiry_time_utc
        self.error = error
        self.warnings = warnings
        self.revision = revision
        self.status_revision = status_revision
        self.run_uuid = run_uuid
        self.parent_run_uuid = parent_run_uuid
        self.root_run_uuid = root_run_uuid
        self.last_start_time_utc = last_start_time_utc
        self.current_compute_time = current_compute_time
        self.compute_duration = compute_duration
        self.effective_start_time_utc = effective_start_time_utc
        self.last_modified_by = last_modified_by
        self.last_modified_utc = last_modified_utc
        self.duration = duration
        self.cancelation_reason = cancelation_reason
        self.current_attempt_id = current_attempt_id
        self.run_id = run_id
        self.parent_run_id = parent_run_id
        self.experiment_id = experiment_id
        self.status = status
        self.start_time_utc = start_time_utc
        self.end_time_utc = end_time_utc
        self.schedule_id = schedule_id
        self.display_name = display_name
        self.name = name
        self.data_container_id = data_container_id
        self.description = description
        self.hidden = hidden
        self.run_type = run_type
        self.run_type_v2 = run_type_v2
        self.properties = properties
        self.parameters = parameters
        self.action_uris = action_uris
        self.script_name = script_name
        self.target = target
        self.unique_child_run_compute_targets = unique_child_run_compute_targets
        self.tags = tags
        self.settings = settings
        self.services = services
        self.input_datasets = input_datasets
        self.output_datasets = output_datasets
        self.run_definition = run_definition
        self.job_specification = job_specification
        self.primary_metric_name = primary_metric_name
        self.created_from = created_from
        self.cancel_uri = cancel_uri
        self.complete_uri = complete_uri
        self.diagnostics_uri = diagnostics_uri
        self.compute_request = compute_request
        self.compute = compute
        self.retain_for_lifetime_of_workspace = retain_for_lifetime_of_workspace
        self.queueing_info = queueing_info
        self.inputs = inputs
        self.outputs = outputs


class RunIndexEntity(msrest.serialization.Model):
    """RunIndexEntity.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar schema_id:
    :vartype schema_id: str
    :ivar entity_id:
    :vartype entity_id: str
    :ivar kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned".
    :vartype kind: str or ~flow.models.EntityKind
    :ivar annotations:
    :vartype annotations: ~flow.models.RunAnnotations
    :ivar properties:
    :vartype properties: ~flow.models.RunProperties
    :ivar internal: Any object.
    :vartype internal: any
    :ivar update_sequence:
    :vartype update_sequence: long
    :ivar type:
    :vartype type: str
    :ivar version:
    :vartype version: str
    :ivar entity_container_id:
    :vartype entity_container_id: str
    :ivar entity_object_id:
    :vartype entity_object_id: str
    :ivar resource_type:
    :vartype resource_type: str
    :ivar relationships:
    :vartype relationships: list[~flow.models.Relationship]
    :ivar asset_id:
    :vartype asset_id: str
    """

    # Read-only fields are populated by the service and never serialized back.
    _validation = {
        'version': {'readonly': True},
        'entity_container_id': {'readonly': True},
        'entity_object_id': {'readonly': True},
        'resource_type': {'readonly': True},
    }

    # Maps Python attribute names to REST wire names and msrest serializer types.
    _attribute_map = {
        'schema_id': {'key': 'schemaId', 'type': 'str'},
        'entity_id': {'key': 'entityId', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'annotations': {'key': 'annotations', 'type': 'RunAnnotations'},
        'properties': {'key': 'properties', 'type': 'RunProperties'},
        'internal': {'key': 'internal', 'type': 'object'},
        'update_sequence': {'key': 'updateSequence', 'type': 'long'},
        'type': {'key': 'type', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'entity_container_id': {'key': 'entityContainerId', 'type': 'str'},
        'entity_object_id': {'key': 'entityObjectId', 'type': 'str'},
        'resource_type': {'key': 'resourceType', 'type': 'str'},
        'relationships': {'key': 'relationships', 'type': '[Relationship]'},
        'asset_id': {'key': 'assetId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        schema_id: Optional[str] = None,
        entity_id: Optional[str] = None,
        kind: Optional[Union[str, "EntityKind"]] = None,
        annotations: Optional["RunAnnotations"] = None,
        properties: Optional["RunProperties"] = None,
        internal: Optional[Any] = None,
        update_sequence: Optional[int] = None,
        type: Optional[str] = None,
        relationships: Optional[List["Relationship"]] = None,
        asset_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword schema_id:
        :paramtype schema_id: str
        :keyword entity_id:
        :paramtype entity_id: str
        :keyword kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned".
        :paramtype kind: str or ~flow.models.EntityKind
        :keyword annotations:
        :paramtype annotations: ~flow.models.RunAnnotations
        :keyword properties:
        :paramtype properties: ~flow.models.RunProperties
        :keyword internal: Any object.
        :paramtype internal: any
        :keyword update_sequence:
        :paramtype update_sequence: long
        :keyword type:
        :paramtype type: str
        :keyword relationships:
        :paramtype relationships: list[~flow.models.Relationship]
        :keyword asset_id:
        :paramtype asset_id: str
        """
        super(RunIndexEntity, self).__init__(**kwargs)
        self.schema_id = schema_id
        self.entity_id = entity_id
        self.kind = kind
        self.annotations = annotations
        self.properties = properties
        self.internal = internal
        self.update_sequence = update_sequence
        self.type = type
        # Server-populated read-only attributes (see _validation): always None locally.
        self.version = None
        self.entity_container_id = None
        self.entity_object_id = None
        self.resource_type = None
        self.relationships = relationships
        self.asset_id = asset_id


class RunIndexMetricSummary(msrest.serialization.Model):
    """RunIndexMetricSummary.

    :ivar count:
    :vartype count: long
    :ivar last_value: Anything.
    :vartype last_value: any
    :ivar minimum_value: Anything.
    :vartype minimum_value: any
    :ivar maximum_value: Anything.
    :vartype maximum_value: any
    :ivar metric_type:
    :vartype metric_type: str
    """

    # Maps Python attribute names to REST wire names and msrest serializer types.
    _attribute_map = {
        'count': {'key': 'count', 'type': 'long'},
        'last_value': {'key': 'lastValue', 'type': 'object'},
        'minimum_value': {'key': 'minimumValue', 'type': 'object'},
        'maximum_value': {'key': 'maximumValue', 'type': 'object'},
        'metric_type': {'key': 'metricType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        count: Optional[int] = None,
        last_value: Optional[Any] = None,
        minimum_value: Optional[Any] = None,
        maximum_value: Optional[Any] = None,
        metric_type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword count:
        :paramtype count: long
        :keyword last_value: Anything.
        :paramtype last_value: any
        :keyword minimum_value: Anything.
        :paramtype minimum_value: any
        :keyword maximum_value: Anything.
        :paramtype maximum_value: any
        :keyword metric_type:
        :paramtype metric_type: str
        """
        super(RunIndexMetricSummary, self).__init__(**kwargs)
        self.count = count
        self.last_value = last_value
        self.minimum_value = minimum_value
        self.maximum_value = maximum_value
        self.metric_type = metric_type


class RunIndexMetricSummarySystemObject(msrest.serialization.Model):
    """RunIndexMetricSummarySystemObject.

    :ivar count:
    :vartype count: long
    :ivar last_value: Anything.
    :vartype last_value: any
    :ivar minimum_value: Anything.
    :vartype minimum_value: any
    :ivar maximum_value: Anything.
    :vartype maximum_value: any
    :ivar metric_type:
    :vartype metric_type: str
    """

    # Same wire shape as RunIndexMetricSummary; emitted as a distinct generated type.
    _attribute_map = {
        'count': {'key': 'count', 'type': 'long'},
        'last_value': {'key': 'lastValue', 'type': 'object'},
        'minimum_value': {'key': 'minimumValue', 'type': 'object'},
        'maximum_value': {'key': 'maximumValue', 'type': 'object'},
        'metric_type': {'key': 'metricType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        count: Optional[int] = None,
        last_value: Optional[Any] = None,
        minimum_value: Optional[Any] = None,
        maximum_value: Optional[Any] = None,
        metric_type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword count:
        :paramtype count: long
        :keyword last_value: Anything.
        :paramtype last_value: any
        :keyword minimum_value: Anything.
        :paramtype minimum_value: any
        :keyword maximum_value: Anything.
        :paramtype maximum_value: any
        :keyword metric_type:
        :paramtype metric_type: str
        """
        super(RunIndexMetricSummarySystemObject, self).__init__(**kwargs)
        self.count = count
        self.last_value = last_value
        self.minimum_value = minimum_value
        self.maximum_value = maximum_value
        self.metric_type = metric_type


class RunIndexResourceMetricSummary(msrest.serialization.Model):
    """RunIndexResourceMetricSummary.

    :ivar gpu_utilization_percent_last_hour:
    :vartype gpu_utilization_percent_last_hour: float
    :ivar gpu_memory_utilization_percent_last_hour:
    :vartype gpu_memory_utilization_percent_last_hour: float
    :ivar gpu_energy_joules:
    :vartype gpu_energy_joules: float
    :ivar resource_metric_names:
    :vartype resource_metric_names: list[str]
    """

    # Maps Python attribute names to REST wire names and msrest serializer types.
    _attribute_map = {
        'gpu_utilization_percent_last_hour': {'key': 'gpuUtilizationPercentLastHour', 'type': 'float'},
        'gpu_memory_utilization_percent_last_hour': {'key': 'gpuMemoryUtilizationPercentLastHour', 'type': 'float'},
        'gpu_energy_joules': {'key': 'gpuEnergyJoules', 'type': 'float'},
        'resource_metric_names': {'key': 'resourceMetricNames', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        gpu_utilization_percent_last_hour: Optional[float] = None,
        gpu_memory_utilization_percent_last_hour: Optional[float] = None,
        gpu_energy_joules: Optional[float] = None,
        resource_metric_names: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword gpu_utilization_percent_last_hour:
        :paramtype gpu_utilization_percent_last_hour: float
        :keyword gpu_memory_utilization_percent_last_hour:
        :paramtype gpu_memory_utilization_percent_last_hour: float
        :keyword gpu_energy_joules:
        :paramtype gpu_energy_joules: float
        :keyword resource_metric_names:
        :paramtype resource_metric_names: list[str]
        """
        super(RunIndexResourceMetricSummary, self).__init__(**kwargs)
        self.gpu_utilization_percent_last_hour = gpu_utilization_percent_last_hour
        self.gpu_memory_utilization_percent_last_hour = gpu_memory_utilization_percent_last_hour
        self.gpu_energy_joules = gpu_energy_joules
        self.resource_metric_names = resource_metric_names


class RunMetricDto(msrest.serialization.Model):
    """RunMetricDto.

    :ivar run_id:
    :vartype run_id: str
    :ivar metric_id:
    :vartype metric_id: str
    :ivar data_container_id:
    :vartype data_container_id: str
    :ivar metric_type:
    :vartype metric_type: str
    :ivar created_utc:
    :vartype created_utc: ~datetime.datetime
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar label:
    :vartype label: str
    :ivar num_cells:
    :vartype num_cells: int
    :ivar data_location:
    :vartype data_location: str
    :ivar cells:
    :vartype cells: list[dict[str, any]]
    :ivar schema:
    :vartype schema: ~flow.models.MetricSchemaDto
    """

    # Maps Python attribute names to REST wire names and msrest serializer types.
    _attribute_map = {
        'run_id': {'key': 'runId', 'type': 'str'},
        'metric_id': {'key': 'metricId', 'type': 'str'},
        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
        'metric_type': {'key': 'metricType', 'type': 'str'},
        'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'num_cells': {'key': 'numCells', 'type': 'int'},
        'data_location': {'key': 'dataLocation', 'type': 'str'},
        'cells': {'key': 'cells', 'type': '[{object}]'},
        'schema': {'key': 'schema', 'type': 'MetricSchemaDto'},
    }

    def __init__(
        self,
        *,
        run_id: Optional[str] = None,
        metric_id: Optional[str] = None,
        data_container_id: Optional[str] = None,
        metric_type: Optional[str] = None,
        created_utc: Optional[datetime.datetime] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        label: Optional[str] = None,
        num_cells: Optional[int] = None,
        data_location: Optional[str] = None,
        cells: Optional[List[Dict[str, Any]]] = None,
        schema: Optional["MetricSchemaDto"] = None,
        **kwargs
    ):
        """
        :keyword run_id:
        :paramtype run_id: str
        :keyword metric_id:
        :paramtype metric_id: str
        :keyword data_container_id:
        :paramtype data_container_id: str
        :keyword metric_type:
        :paramtype metric_type: str
        :keyword created_utc:
        :paramtype created_utc: ~datetime.datetime
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword label:
        :paramtype label: str
        :keyword num_cells:
        :paramtype num_cells: int
        :keyword data_location:
        :paramtype data_location: str
        :keyword cells:
        :paramtype cells: list[dict[str, any]]
        :keyword schema:
        :paramtype schema: ~flow.models.MetricSchemaDto
        """
        super(RunMetricDto, self).__init__(**kwargs)
        self.run_id = run_id
        self.metric_id = metric_id
        self.data_container_id = data_container_id
        self.metric_type = metric_type
        self.created_utc = created_utc
        self.name = name
        self.description = description
        self.label = label
        self.num_cells = num_cells
        self.data_location = data_location
        self.cells = cells
        self.schema = schema


# Simple name/type pair for a run metric.
class RunMetricsTypesDto(msrest.serialization.Model):
    """RunMetricsTypesDto.

    :ivar name:
    :vartype name: str
    :ivar type:
    :vartype type: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword type:
        :paramtype type: str
        """
        super(RunMetricsTypesDto, self).__init__(**kwargs)
        self.name = name
        self.type = type


# Flat property bag describing a run; referenced from RunIndexEntity.properties.
class RunProperties(msrest.serialization.Model):
    """RunProperties.

    :ivar data_container_id:
    :vartype data_container_id: str
    :ivar target_name:
    :vartype target_name: str
    :ivar run_name:
    :vartype run_name: str
    :ivar experiment_name:
    :vartype experiment_name: str
    :ivar run_id:
    :vartype run_id: str
    :ivar parent_run_id:
    :vartype parent_run_id: str
    :ivar root_run_id:
    :vartype root_run_id: str
    :ivar run_type:
    :vartype run_type: str
    :ivar run_type_v2:
    :vartype run_type_v2: ~flow.models.RunTypeV2Index
    :ivar script_name:
    :vartype script_name: str
    :ivar experiment_id:
    :vartype experiment_id: str
    :ivar run_uuid:
    :vartype run_uuid: str
    :ivar parent_run_uuid:
    :vartype parent_run_uuid: str
    :ivar run_number:
    :vartype run_number: int
    :ivar start_time:
    :vartype start_time: ~datetime.datetime
    :ivar end_time:
    :vartype end_time: ~datetime.datetime
    :ivar compute_request:
    :vartype compute_request: ~flow.models.ComputeRequest
    :ivar compute:
    :vartype compute: ~flow.models.Compute
    :ivar user_properties: This is a dictionary.
    :vartype user_properties: dict[str, str]
    :ivar action_uris: This is a dictionary.
    :vartype action_uris: dict[str, str]
    :ivar duration:
    :vartype duration: str
    :ivar duration_milliseconds:
    :vartype duration_milliseconds: float
    :ivar creation_context:
    :vartype creation_context: ~flow.models.CreationContext
    """

    # Maps each Python attribute to its wire key and msrest type for (de)serialization.
    _attribute_map = {
        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
        'target_name': {'key': 'targetName', 'type': 'str'},
        'run_name': {'key': 'runName', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
        'root_run_id': {'key': 'rootRunId', 'type': 'str'},
        'run_type': {'key': 'runType', 'type': 'str'},
        'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2Index'},
        'script_name': {'key': 'scriptName', 'type': 'str'},
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'run_uuid': {'key': 'runUuid', 'type': 'str'},
        'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
        'run_number': {'key': 'runNumber', 'type': 'int'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
        'compute': {'key': 'compute', 'type': 'Compute'},
        'user_properties': {'key': 'userProperties', 'type': '{str}'},
        'action_uris': {'key': 'actionUris', 'type': '{str}'},
        'duration': {'key': 'duration', 'type': 'str'},
        'duration_milliseconds': {'key': 'durationMilliseconds', 'type': 'float'},
        'creation_context': {'key': 'creationContext', 'type': 'CreationContext'},
    }

    def __init__(
        self,
        *,
        data_container_id: Optional[str] = None,
        target_name: Optional[str] = None,
        run_name: Optional[str] = None,
        experiment_name: Optional[str] = None,
        run_id: Optional[str] = None,
        parent_run_id: Optional[str] = None,
        root_run_id: Optional[str] = None,
        run_type: Optional[str] = None,
        run_type_v2: Optional["RunTypeV2Index"] = None,
        script_name: Optional[str] = None,
        experiment_id: Optional[str] = None,
        run_uuid: Optional[str] = None,
        parent_run_uuid: Optional[str] = None,
        run_number: Optional[int] = None,
        start_time: Optional[datetime.datetime] = None,
        end_time: Optional[datetime.datetime] = None,
        compute_request: Optional["ComputeRequest"] = None,
        compute: Optional["Compute"] = None,
        user_properties: Optional[Dict[str, str]] = None,
        action_uris: Optional[Dict[str, str]] = None,
        duration: Optional[str] = None,
        duration_milliseconds: Optional[float] = None,
        creation_context: Optional["CreationContext"] = None,
        **kwargs
    ):
        """
        :keyword data_container_id:
        :paramtype data_container_id: str
        :keyword target_name:
        :paramtype target_name: str
        :keyword run_name:
        :paramtype run_name: str
        :keyword experiment_name:
        :paramtype experiment_name: str
        :keyword run_id:
        :paramtype run_id: str
        :keyword parent_run_id:
        :paramtype parent_run_id: str
        :keyword root_run_id:
        :paramtype root_run_id: str
        :keyword run_type:
        :paramtype run_type: str
        :keyword run_type_v2:
        :paramtype run_type_v2: ~flow.models.RunTypeV2Index
        :keyword script_name:
        :paramtype script_name: str
        :keyword experiment_id:
        :paramtype experiment_id: str
        :keyword run_uuid:
        :paramtype run_uuid: str
        :keyword parent_run_uuid:
        :paramtype parent_run_uuid: str
        :keyword run_number:
        :paramtype run_number: int
        :keyword start_time:
        :paramtype start_time: ~datetime.datetime
        :keyword end_time:
        :paramtype end_time: ~datetime.datetime
        :keyword compute_request:
        :paramtype compute_request: ~flow.models.ComputeRequest
        :keyword compute:
        :paramtype compute: ~flow.models.Compute
        :keyword user_properties: This is a dictionary.
        :paramtype user_properties: dict[str, str]
        :keyword action_uris: This is a dictionary.
        :paramtype action_uris: dict[str, str]
        :keyword duration:
        :paramtype duration: str
        :keyword duration_milliseconds:
        :paramtype duration_milliseconds: float
        :keyword creation_context:
        :paramtype creation_context: ~flow.models.CreationContext
        """
        super(RunProperties, self).__init__(**kwargs)
        self.data_container_id = data_container_id
        self.target_name = target_name
        self.run_name = run_name
        self.experiment_name = experiment_name
        self.run_id = run_id
        self.parent_run_id = parent_run_id
        self.root_run_id = root_run_id
        self.run_type = run_type
        self.run_type_v2 = run_type_v2
        self.script_name = script_name
        self.experiment_id = experiment_id
        self.run_uuid = run_uuid
        self.parent_run_uuid = parent_run_uuid
        self.run_number = run_number
        self.start_time = start_time
        self.end_time = end_time
        self.compute_request = compute_request
        self.compute = compute
        self.user_properties = user_properties
        self.action_uris = action_uris
        self.duration = duration
        self.duration_milliseconds = duration_milliseconds
        self.creation_context = creation_context


# Describes one run-setting parameter: type, bounds, UI hints, and enablement rules.
class RunSettingParameter(msrest.serialization.Model):
    """RunSettingParameter.

    :ivar name:
    :vartype name: str
    :ivar label:
    :vartype label: str
    :ivar parameter_type: Possible values include: "Undefined", "Int", "Double", "Bool", "String",
     "JsonString", "YamlString", "StringList".
    :vartype parameter_type: str or ~flow.models.RunSettingParameterType
    :ivar is_optional:
    :vartype is_optional: bool
    :ivar default_value:
    :vartype default_value: str
    :ivar lower_bound:
    :vartype lower_bound: str
    :ivar upper_bound:
    :vartype upper_bound: str
    :ivar description:
    :vartype description: str
    :ivar run_setting_ui_hint:
    :vartype run_setting_ui_hint: ~flow.models.RunSettingUIParameterHint
    :ivar argument_name:
    :vartype argument_name: str
    :ivar section_name:
    :vartype section_name: str
    :ivar section_description:
    :vartype section_description: str
    :ivar section_argument_name:
    :vartype section_argument_name: str
    :ivar examples:
    :vartype examples: list[str]
    :ivar enum_values:
    :vartype enum_values: list[str]
    :ivar enum_values_to_argument_strings: This is a dictionary.
    :vartype enum_values_to_argument_strings: dict[str, str]
    :ivar enabled_by_parameter_name:
    :vartype enabled_by_parameter_name: str
    :ivar enabled_by_parameter_values:
    :vartype enabled_by_parameter_values: list[str]
    :ivar disabled_by_parameters:
    :vartype disabled_by_parameters: list[str]
    :ivar module_run_setting_type: Possible values include: "All", "Released", "Default",
     "Testing", "Legacy", "Preview", "UxFull", "Integration", "UxIntegration", "Full".
    :vartype module_run_setting_type: str or ~flow.models.ModuleRunSettingTypes
    :ivar linked_parameter_default_value_mapping: Dictionary of :code:`<string>`.
    :vartype linked_parameter_default_value_mapping: dict[str, str]
    :ivar linked_parameter_key_name:
    :vartype linked_parameter_key_name: str
    :ivar support_link_setting:
    :vartype support_link_setting: bool
    """

    # Maps each Python attribute to its wire key and msrest type for (de)serialization.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'parameter_type': {'key': 'parameterType', 'type': 'str'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
        'default_value': {'key': 'defaultValue', 'type': 'str'},
        'lower_bound': {'key': 'lowerBound', 'type': 'str'},
        'upper_bound': {'key': 'upperBound', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'run_setting_ui_hint': {'key': 'runSettingUIHint', 'type': 'RunSettingUIParameterHint'},
        'argument_name': {'key': 'argumentName', 'type': 'str'},
        'section_name': {'key': 'sectionName', 'type': 'str'},
        'section_description': {'key': 'sectionDescription', 'type': 'str'},
        'section_argument_name': {'key': 'sectionArgumentName', 'type': 'str'},
        'examples': {'key': 'examples', 'type': '[str]'},
        'enum_values': {'key': 'enumValues', 'type': '[str]'},
        'enum_values_to_argument_strings': {'key': 'enumValuesToArgumentStrings', 'type': '{str}'},
        'enabled_by_parameter_name': {'key': 'enabledByParameterName', 'type': 'str'},
        'enabled_by_parameter_values': {'key': 'enabledByParameterValues', 'type': '[str]'},
        'disabled_by_parameters': {'key': 'disabledByParameters', 'type': '[str]'},
        'module_run_setting_type': {'key': 'moduleRunSettingType', 'type': 'str'},
        'linked_parameter_default_value_mapping': {'key': 'linkedParameterDefaultValueMapping', 'type': '{str}'},
        'linked_parameter_key_name': {'key': 'linkedParameterKeyName', 'type': 'str'},
        'support_link_setting': {'key': 'supportLinkSetting', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        label: Optional[str] = None,
        parameter_type: Optional[Union[str, "RunSettingParameterType"]] = None,
        is_optional: Optional[bool] = None,
        default_value: Optional[str] = None,
        lower_bound: Optional[str] = None,
        upper_bound: Optional[str] = None,
        description: Optional[str] = None,
        run_setting_ui_hint: Optional["RunSettingUIParameterHint"] = None,
        argument_name: Optional[str] = None,
        section_name: Optional[str] = None,
        section_description: Optional[str] = None,
        section_argument_name: Optional[str] = None,
        examples: Optional[List[str]] = None,
        enum_values: Optional[List[str]] = None,
        enum_values_to_argument_strings: Optional[Dict[str, str]] = None,
        enabled_by_parameter_name: Optional[str] = None,
        enabled_by_parameter_values: Optional[List[str]] = None,
        disabled_by_parameters: Optional[List[str]] = None,
        module_run_setting_type: Optional[Union[str, "ModuleRunSettingTypes"]] = None,
        linked_parameter_default_value_mapping: Optional[Dict[str, str]] = None,
        linked_parameter_key_name: Optional[str] = None,
        support_link_setting: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword label:
        :paramtype label: str
        :keyword parameter_type: Possible values include: "Undefined", "Int", "Double", "Bool",
         "String", "JsonString", "YamlString", "StringList".
        :paramtype parameter_type: str or ~flow.models.RunSettingParameterType
        :keyword is_optional:
        :paramtype is_optional: bool
        :keyword default_value:
        :paramtype default_value: str
        :keyword lower_bound:
        :paramtype lower_bound: str
        :keyword upper_bound:
        :paramtype upper_bound: str
        :keyword description:
        :paramtype description: str
        :keyword run_setting_ui_hint:
        :paramtype run_setting_ui_hint: ~flow.models.RunSettingUIParameterHint
        :keyword argument_name:
        :paramtype argument_name: str
        :keyword section_name:
        :paramtype section_name: str
        :keyword section_description:
        :paramtype section_description: str
        :keyword section_argument_name:
        :paramtype section_argument_name: str
        :keyword examples:
        :paramtype examples: list[str]
        :keyword enum_values:
        :paramtype enum_values: list[str]
        :keyword enum_values_to_argument_strings: This is a dictionary.
        :paramtype enum_values_to_argument_strings: dict[str, str]
        :keyword enabled_by_parameter_name:
        :paramtype enabled_by_parameter_name: str
        :keyword enabled_by_parameter_values:
        :paramtype enabled_by_parameter_values: list[str]
        :keyword disabled_by_parameters:
        :paramtype disabled_by_parameters: list[str]
        :keyword module_run_setting_type: Possible values include: "All", "Released", "Default",
         "Testing", "Legacy", "Preview", "UxFull", "Integration", "UxIntegration", "Full".
        :paramtype module_run_setting_type: str or ~flow.models.ModuleRunSettingTypes
        :keyword linked_parameter_default_value_mapping: Dictionary of :code:`<string>`.
        :paramtype linked_parameter_default_value_mapping: dict[str, str]
        :keyword linked_parameter_key_name:
        :paramtype linked_parameter_key_name: str
        :keyword support_link_setting:
        :paramtype support_link_setting: bool
        """
        super(RunSettingParameter, self).__init__(**kwargs)
        self.name = name
        self.label = label
        self.parameter_type = parameter_type
        self.is_optional = is_optional
        self.default_value = default_value
        self.lower_bound = lower_bound
        self.upper_bound = upper_bound
        self.description = description
        self.run_setting_ui_hint = run_setting_ui_hint
        self.argument_name = argument_name
        self.section_name = section_name
        self.section_description = section_description
        self.section_argument_name = section_argument_name
        self.examples = examples
        self.enum_values = enum_values
        self.enum_values_to_argument_strings = enum_values_to_argument_strings
        self.enabled_by_parameter_name = enabled_by_parameter_name
        self.enabled_by_parameter_values = enabled_by_parameter_values
        self.disabled_by_parameters = disabled_by_parameters
        self.module_run_setting_type = module_run_setting_type
        self.linked_parameter_default_value_mapping = linked_parameter_default_value_mapping
        self.linked_parameter_key_name = linked_parameter_key_name
        self.support_link_setting = support_link_setting


# Concrete value assigned to a run setting; can nest further per-compute assignments.
class RunSettingParameterAssignment(msrest.serialization.Model):
    """RunSettingParameterAssignment.

    :ivar use_graph_default_compute:
    :vartype use_graph_default_compute: bool
    :ivar mlc_compute_type:
    :vartype mlc_compute_type: str
    :ivar compute_run_settings:
    :vartype compute_run_settings: list[~flow.models.RunSettingParameterAssignment]
    :ivar linked_parameter_name:
    :vartype linked_parameter_name: str
    :ivar value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
     "Input", "DataPath", "DataSetDefinition".
    :vartype value_type: str or ~flow.models.ParameterValueType
    :ivar assignments_to_concatenate:
    :vartype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
    :ivar data_path_assignment:
    :vartype data_path_assignment: ~flow.models.LegacyDataPath
    :ivar data_set_definition_value_assignment:
    :vartype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
    :ivar name:
    :vartype name: str
    :ivar value:
    :vartype value: str
    """

    # Note the recursive '[RunSettingParameterAssignment]' entry for nested compute settings.
    _attribute_map = {
        'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
        'mlc_compute_type': {'key': 'mlcComputeType', 'type': 'str'},
        'compute_run_settings': {'key': 'computeRunSettings', 'type': '[RunSettingParameterAssignment]'},
        'linked_parameter_name': {'key': 'linkedParameterName', 'type': 'str'},
        'value_type': {'key': 'valueType', 'type': 'str'},
        'assignments_to_concatenate': {'key': 'assignmentsToConcatenate', 'type': '[ParameterAssignment]'},
        'data_path_assignment': {'key': 'dataPathAssignment', 'type': 'LegacyDataPath'},
        'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': 'DataSetDefinitionValue'},
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        use_graph_default_compute: Optional[bool] = None,
        mlc_compute_type: Optional[str] = None,
        compute_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
        linked_parameter_name: Optional[str] = None,
        value_type: Optional[Union[str, "ParameterValueType"]] = None,
        assignments_to_concatenate: Optional[List["ParameterAssignment"]] = None,
        data_path_assignment: Optional["LegacyDataPath"] = None,
        data_set_definition_value_assignment: Optional["DataSetDefinitionValue"] = None,
        name: Optional[str] = None,
        value: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword use_graph_default_compute:
        :paramtype use_graph_default_compute: bool
        :keyword mlc_compute_type:
        :paramtype mlc_compute_type: str
        :keyword compute_run_settings:
        :paramtype compute_run_settings: list[~flow.models.RunSettingParameterAssignment]
        :keyword linked_parameter_name:
        :paramtype linked_parameter_name: str
        :keyword value_type: Possible values include: "Literal", "GraphParameterName",
         "Concatenate", "Input", "DataPath", "DataSetDefinition".
        :paramtype value_type: str or ~flow.models.ParameterValueType
        :keyword assignments_to_concatenate:
        :paramtype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
        :keyword data_path_assignment:
        :paramtype data_path_assignment: ~flow.models.LegacyDataPath
        :keyword data_set_definition_value_assignment:
        :paramtype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
        :keyword name:
        :paramtype name: str
        :keyword value:
        :paramtype value: str
        """
        super(RunSettingParameterAssignment, self).__init__(**kwargs)
        self.use_graph_default_compute = use_graph_default_compute
        self.mlc_compute_type = mlc_compute_type
        self.compute_run_settings = compute_run_settings
        self.linked_parameter_name = linked_parameter_name
        self.value_type = value_type
        self.assignments_to_concatenate = assignments_to_concatenate
        self.data_path_assignment = data_path_assignment
        self.data_set_definition_value_assignment = data_set_definition_value_assignment
        self.name = name
        self.value = value


# UI rendering hint for a run-setting parameter: widget type plus widget-specific editors.
class RunSettingUIParameterHint(msrest.serialization.Model):
    """RunSettingUIParameterHint.

    :ivar ui_widget_type: Possible values include: "Default", "ComputeSelection", "JsonEditor",
     "Mode", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep",
     "DataStoreSelection", "Checkbox", "MultipleSelection", "HyperparameterConfiguration",
     "JsonTextBox", "Connection", "Static".
    :vartype ui_widget_type: str or ~flow.models.RunSettingUIWidgetTypeEnum
    :ivar json_editor:
    :vartype json_editor: ~flow.models.UIJsonEditor
    :ivar yaml_editor:
    :vartype yaml_editor: ~flow.models.UIYamlEditor
    :ivar compute_selection:
    :vartype compute_selection: ~flow.models.UIComputeSelection
    :ivar hyperparameter_configuration:
    :vartype hyperparameter_configuration: ~flow.models.UIHyperparameterConfiguration
    :ivar ux_ignore:
    :vartype ux_ignore: bool
    :ivar anonymous:
    :vartype anonymous: bool
    :ivar support_reset:
    :vartype support_reset: bool
    """

    _attribute_map = {
        'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'},
        'json_editor': {'key': 'jsonEditor', 'type': 'UIJsonEditor'},
        'yaml_editor': {'key': 'yamlEditor', 'type': 'UIYamlEditor'},
        'compute_selection': {'key': 'computeSelection', 'type': 'UIComputeSelection'},
        'hyperparameter_configuration': {'key': 'hyperparameterConfiguration', 'type': 'UIHyperparameterConfiguration'},
        'ux_ignore': {'key': 'uxIgnore', 'type': 'bool'},
        'anonymous': {'key': 'anonymous', 'type': 'bool'},
        'support_reset': {'key': 'supportReset', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        ui_widget_type: Optional[Union[str, "RunSettingUIWidgetTypeEnum"]] = None,
        json_editor: Optional["UIJsonEditor"] = None,
        yaml_editor: Optional["UIYamlEditor"] = None,
        compute_selection: Optional["UIComputeSelection"] = None,
        hyperparameter_configuration: Optional["UIHyperparameterConfiguration"] = None,
        ux_ignore: Optional[bool] = None,
        anonymous: Optional[bool] = None,
        support_reset: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword ui_widget_type: Possible values include: "Default", "ComputeSelection",
         "JsonEditor", "Mode", "SearchSpaceParameter", "SectionToggle", "YamlEditor",
         "EnableRuntimeSweep", "DataStoreSelection", "Checkbox", "MultipleSelection",
         "HyperparameterConfiguration", "JsonTextBox", "Connection", "Static".
        :paramtype ui_widget_type: str or ~flow.models.RunSettingUIWidgetTypeEnum
        :keyword json_editor:
        :paramtype json_editor: ~flow.models.UIJsonEditor
        :keyword yaml_editor:
        :paramtype yaml_editor: ~flow.models.UIYamlEditor
        :keyword compute_selection:
        :paramtype compute_selection: ~flow.models.UIComputeSelection
        :keyword hyperparameter_configuration:
        :paramtype hyperparameter_configuration: ~flow.models.UIHyperparameterConfiguration
        :keyword ux_ignore:
        :paramtype ux_ignore: bool
        :keyword anonymous:
        :paramtype anonymous: bool
        :keyword support_reset:
        :paramtype support_reset: bool
        """
        super(RunSettingUIParameterHint, self).__init__(**kwargs)
        self.ui_widget_type = ui_widget_type
        self.json_editor = json_editor
        self.yaml_editor = yaml_editor
        self.compute_selection = compute_selection
        self.hyperparameter_configuration = hyperparameter_configuration
        self.ux_ignore = ux_ignore
        self.anonymous = anonymous
        self.support_reset = support_reset


# Span a run spent in one status, with optional nested sub-status periods.
# NOTE(review): start/end are 'long' values — presumably timestamps; units not stated here.
class RunStatusPeriod(msrest.serialization.Model):
    """RunStatusPeriod.

    :ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
     "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
     "Failed", "Canceled".
    :vartype status: str or ~flow.models.RunStatus
    :ivar sub_periods:
    :vartype sub_periods: list[~flow.models.SubStatusPeriod]
    :ivar start:
    :vartype start: long
    :ivar end:
    :vartype end: long
    """

    _attribute_map = {
        'status': {'key': 'status', 'type': 'str'},
        'sub_periods': {'key': 'subPeriods', 'type': '[SubStatusPeriod]'},
        'start': {'key': 'start', 'type': 'long'},
        'end': {'key': 'end', 'type': 'long'},
    }

    def __init__(
        self,
        *,
        status: Optional[Union[str, "RunStatus"]] = None,
        sub_periods: Optional[List["SubStatusPeriod"]] = None,
        start: Optional[int] = None,
        end: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
         "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested",
         "Completed", "Failed", "Canceled".
        :paramtype status: str or ~flow.models.RunStatus
        :keyword sub_periods:
        :paramtype sub_periods: list[~flow.models.SubStatusPeriod]
        :keyword start:
        :paramtype start: long
        :keyword end:
        :paramtype end: long
        """
        super(RunStatusPeriod, self).__init__(**kwargs)
        self.status = status
        self.sub_periods = sub_periods
        self.start = start
        self.end = end


# Runtime container configuration: base image name plus a version string.
class RuntimeConfiguration(msrest.serialization.Model):
    """RuntimeConfiguration.

    :ivar base_image:
    :vartype base_image: str
    :ivar version:
    :vartype version: str
    """

    _attribute_map = {
        'base_image': {'key': 'baseImage', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        base_image: Optional[str] = None,
        version: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword base_image:
        :paramtype base_image: str
        :keyword version:
        :paramtype version: str
        """
        super(RuntimeConfiguration, self).__init__(**kwargs)
        self.base_image = base_image
        self.version = version


# Run type descriptor; unlike RunTypeV2Index below, traits is a list here.
class RunTypeV2(msrest.serialization.Model):
    """RunTypeV2.

    :ivar orchestrator:
    :vartype orchestrator: str
    :ivar traits:
    :vartype traits: list[str]
    :ivar attribution:
    :vartype attribution: str
    :ivar compute_type:
    :vartype compute_type: str
    """

    # msrest validation: entries in `traits` must be unique.
    _validation = {
        'traits': {'unique': True},
    }

    _attribute_map = {
        'orchestrator': {'key': 'orchestrator', 'type': 'str'},
        'traits': {'key': 'traits', 'type': '[str]'},
        'attribution': {'key': 'attribution', 'type': 'str'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        orchestrator: Optional[str] = None,
        traits: Optional[List[str]] = None,
        attribution: Optional[str] = None,
        compute_type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword orchestrator:
        :paramtype orchestrator: str
        :keyword traits:
        :paramtype traits: list[str]
        :keyword attribution:
        :paramtype attribution: str
        :keyword compute_type:
        :paramtype compute_type: str
        """
        super(RunTypeV2, self).__init__(**kwargs)
        self.orchestrator = orchestrator
        self.traits = traits
        self.attribution = attribution
        self.compute_type = compute_type


# Index-side variant of RunTypeV2: traits is a string dictionary instead of a list.
class RunTypeV2Index(msrest.serialization.Model):
    """RunTypeV2Index.

    :ivar orchestrator:
    :vartype orchestrator: str
    :ivar traits: Dictionary of :code:`<string>`.
    :vartype traits: dict[str, str]
    :ivar attribution:
    :vartype attribution: str
    :ivar compute_type:
    :vartype compute_type: str
    """

    _attribute_map = {
        'orchestrator': {'key': 'orchestrator', 'type': 'str'},
        'traits': {'key': 'traits', 'type': '{str}'},
        'attribution': {'key': 'attribution', 'type': 'str'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        orchestrator: Optional[str] = None,
        traits: Optional[Dict[str, str]] = None,
        attribution: Optional[str] = None,
        compute_type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword orchestrator:
        :paramtype orchestrator: str
        :keyword traits: Dictionary of :code:`<string>`.
        :paramtype traits: dict[str, str]
        :keyword attribution:
        :paramtype attribution: str
        :keyword compute_type:
        :paramtype compute_type: str
        """
        super(RunTypeV2Index, self).__init__(**kwargs)
        self.orchestrator = orchestrator
        self.traits = traits
        self.attribution = attribution
        self.compute_type = compute_type


# Metadata describing a gallery/feed sample (name, docs link, tags, timestamps).
class SampleMeta(msrest.serialization.Model):
    """SampleMeta.

    :ivar image:
    :vartype image: str
    :ivar id:
    :vartype id: str
    :ivar display_name:
    :vartype display_name: str
    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar doc_link:
    :vartype doc_link: str
    :ivar tags: A set of tags.
    :vartype tags: list[str]
    :ivar created_at:
    :vartype created_at: ~datetime.datetime
    :ivar updated_at:
    :vartype updated_at: ~datetime.datetime
    :ivar feed_name:
    :vartype feed_name: str
    :ivar version:
    :vartype version: str
    """

    _attribute_map = {
        'image': {'key': 'image', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'doc_link': {'key': 'docLink', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '[str]'},
        'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
        'updated_at': {'key': 'updatedAt', 'type': 'iso-8601'},
        'feed_name': {'key': 'feedName', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        image: Optional[str] = None,
        id: Optional[str] = None,
        display_name: Optional[str] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        doc_link: Optional[str] = None,
        tags: Optional[List[str]] = None,
        created_at: Optional[datetime.datetime] = None,
        updated_at: Optional[datetime.datetime] = None,
        feed_name: Optional[str] = None,
        version: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword image:
        :paramtype image: str
        :keyword id:
        :paramtype id: str
        :keyword display_name:
        :paramtype display_name: str
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword doc_link:
        :paramtype doc_link: str
        :keyword tags: A set of tags.
        :paramtype tags: list[str]
        :keyword created_at:
        :paramtype created_at: ~datetime.datetime
        :keyword updated_at:
        :paramtype updated_at: ~datetime.datetime
        :keyword feed_name:
        :paramtype feed_name: str
        :keyword version:
        :paramtype version: str
        """
        super(SampleMeta, self).__init__(**kwargs)
        self.image = image
        self.id = id
        self.display_name = display_name
        self.name = name
        self.description = description
        self.doc_link = doc_link
        self.tags = tags
        self.created_at = created_at
        self.updated_at = updated_at
        self.feed_name = feed_name
        self.version = version


# Reference to a saved dataset by its id only.
class SavedDataSetReference(msrest.serialization.Model):
    """SavedDataSetReference.

    :ivar id:
    :vartype id: str
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        """
        super(SavedDataSetReference, self).__init__(**kwargs)
        self.id = id


# Request payload for saving a pipeline draft: graph, parameter/data assignments,
# run settings, and draft metadata.
class SavePipelineDraftRequest(msrest.serialization.Model):
    """SavePipelineDraftRequest.

    :ivar ui_widget_meta_infos:
    :vartype ui_widget_meta_infos: list[~flow.models.UIWidgetMetaInfo]
    :ivar web_service_inputs:
    :vartype web_service_inputs: list[~flow.models.WebServicePort]
    :ivar web_service_outputs:
    :vartype web_service_outputs: list[~flow.models.WebServicePort]
    :ivar nodes_in_draft:
    :vartype nodes_in_draft: list[str]
    :ivar name:
    :vartype name: str
    :ivar pipeline_type: Possible values include: "TrainingPipeline",
     "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
    :vartype pipeline_type: str or ~flow.models.PipelineType
    :ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
    :vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
    :ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
     "ContainsDesignerBuildin".
    :vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
    :ivar sub_pipelines_info:
    :vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
    :ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
    :vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
    :ivar pipeline_parameters: This is a dictionary.
    :vartype pipeline_parameters: dict[str, str]
    :ivar data_path_assignments: This is a dictionary.
    :vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
    :ivar data_set_definition_value_assignments: This is a dictionary.
    :vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
    :ivar asset_output_settings_assignments: This is a dictionary.
    :vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
    :ivar graph:
    :vartype graph: ~flow.models.GraphDraftEntity
    :ivar pipeline_run_settings:
    :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
    :ivar module_node_run_settings:
    :vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
    :ivar module_node_ui_input_settings:
    :vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar continue_run_on_step_failure:
    :vartype continue_run_on_step_failure: bool
    :ivar description:
    :vartype description: str
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar enforce_rerun:
    :vartype enforce_rerun: bool
    :ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
     "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
     "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
     "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
    :vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
    """

    _attribute_map = {
        'ui_widget_meta_infos': {'key': 'uiWidgetMetaInfos', 'type': '[UIWidgetMetaInfo]'},
        'web_service_inputs': {'key': 'webServiceInputs', 'type': '[WebServicePort]'},
        'web_service_outputs': {'key': 'webServiceOutputs', 'type': '[WebServicePort]'},
        'nodes_in_draft': {'key': 'nodesInDraft', 'type': '[str]'},
        'name': {'key': 'name', 'type': 'str'},
        'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
        'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
        'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
        'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
        'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
        'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
        'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
        'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
        'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
        'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
        'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
        'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
        'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
        'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        ui_widget_meta_infos: Optional[List["UIWidgetMetaInfo"]] = None,
        web_service_inputs: Optional[List["WebServicePort"]] = None,
        web_service_outputs: Optional[List["WebServicePort"]] = None,
        nodes_in_draft: Optional[List[str]] = None,
        name: Optional[str] = None,
        pipeline_type: Optional[Union[str, "PipelineType"]] = None,
        pipeline_draft_mode: Optional[Union[str, "PipelineDraftMode"]] = None,
        graph_components_mode: Optional[Union[str, "GraphComponentsMode"]] = None,
        sub_pipelines_info: Optional["SubPipelinesInfo"] = None,
        flattened_sub_graphs: Optional[Dict[str, "PipelineSubDraft"]] = None,
        pipeline_parameters: Optional[Dict[str, str]] = None,
        data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None,
        data_set_definition_value_assignments: Optional[Dict[str, "DataSetDefinitionValue"]] = None,
        asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None,
        graph: Optional["GraphDraftEntity"] = None,
        pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
        module_node_run_settings: Optional[List["GraphModuleNodeRunSetting"]] = None,
        module_node_ui_input_settings: Optional[List["GraphModuleNodeUIInputSetting"]] = None,
        tags: Optional[Dict[str, str]] = None,
        continue_run_on_step_failure: Optional[bool] = None,
        description: Optional[str] = None,
        properties: Optional[Dict[str, str]] = None,
        enforce_rerun: Optional[bool] = None,
        dataset_access_modes: Optional[Union[str, "DatasetAccessModes"]] = None,
        **kwargs
    ):
        """
        :keyword ui_widget_meta_infos:
        :paramtype ui_widget_meta_infos: list[~flow.models.UIWidgetMetaInfo]
        :keyword web_service_inputs:
        :paramtype web_service_inputs: list[~flow.models.WebServicePort]
        :keyword web_service_outputs:
        :paramtype web_service_outputs: list[~flow.models.WebServicePort]
        :keyword nodes_in_draft:
        :paramtype nodes_in_draft: list[str]
        :keyword name:
        :paramtype name: str
        :keyword pipeline_type: Possible values include: "TrainingPipeline",
         "RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
        :paramtype pipeline_type: str or ~flow.models.PipelineType
        :keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
        :paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
        :keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
         "ContainsDesignerBuildin".
        :paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
        :keyword sub_pipelines_info:
        :paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
        :keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
        :paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
        :keyword pipeline_parameters: This is a dictionary.
        :paramtype pipeline_parameters: dict[str, str]
        :keyword data_path_assignments: This is a dictionary.
        :paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
        :keyword data_set_definition_value_assignments: This is a dictionary.
        :paramtype data_set_definition_value_assignments: dict[str,
         ~flow.models.DataSetDefinitionValue]
        :keyword asset_output_settings_assignments: This is a dictionary.
        :paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
        :keyword graph:
        :paramtype graph: ~flow.models.GraphDraftEntity
        :keyword pipeline_run_settings:
        :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
        :keyword module_node_run_settings:
        :paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
        :keyword module_node_ui_input_settings:
        :paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword continue_run_on_step_failure:
        :paramtype continue_run_on_step_failure: bool
        :keyword description:
        :paramtype description: str
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword enforce_rerun:
        :paramtype enforce_rerun: bool
        :keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
         "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
         "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
         "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
        :paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
        """
        super(SavePipelineDraftRequest, self).__init__(**kwargs)
        self.ui_widget_meta_infos = ui_widget_meta_infos
        self.web_service_inputs = web_service_inputs
        self.web_service_outputs = web_service_outputs
        self.nodes_in_draft = nodes_in_draft
        self.name = name
        self.pipeline_type = pipeline_type
        self.pipeline_draft_mode = pipeline_draft_mode
        self.graph_components_mode = graph_components_mode
        self.sub_pipelines_info = sub_pipelines_info
        self.flattened_sub_graphs = flattened_sub_graphs
        self.pipeline_parameters = pipeline_parameters
        self.data_path_assignments = data_path_assignments
        self.data_set_definition_value_assignments = data_set_definition_value_assignments
        self.asset_output_settings_assignments = asset_output_settings_assignments
        self.graph = graph
        self.pipeline_run_settings = pipeline_run_settings
        self.module_node_run_settings = module_node_run_settings
        self.module_node_ui_input_settings = module_node_ui_input_settings
        self.tags = tags
        self.continue_run_on_step_failure = continue_run_on_step_failure
        self.description = description
        self.properties = properties
        self.enforce_rerun = enforce_rerun
        self.dataset_access_modes = dataset_access_modes


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class ScheduleBase(msrest.serialization.Model):
    """ScheduleBase.

    :ivar schedule_status: Possible values include: "Enabled", "Disabled".
    :vartype schedule_status: str or ~flow.models.MfeInternalScheduleStatus
    :ivar schedule_type: Possible values include: "Cron", "Recurrence".
    :vartype schedule_type: str or ~flow.models.ScheduleType
    :ivar end_time:
    :vartype end_time: ~datetime.datetime
    :ivar start_time:
    :vartype start_time: ~datetime.datetime
    :ivar time_zone:
    :vartype time_zone: str
    :ivar expression:
    :vartype expression: str
    :ivar frequency: Possible values include: "Minute", "Hour", "Day", "Week", "Month".
    :vartype frequency: str or ~flow.models.RecurrenceFrequency
    :ivar interval:
    :vartype interval: int
    :ivar pattern:
    :vartype pattern: ~flow.models.RecurrencePattern
    """

    _attribute_map = {
        'schedule_status': {'key': 'scheduleStatus', 'type': 'str'},
        'schedule_type': {'key': 'scheduleType', 'type': 'str'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'time_zone': {'key': 'timeZone', 'type': 'str'},
        'expression': {'key': 'expression', 'type': 'str'},
        'frequency': {'key': 'frequency', 'type': 'str'},
        'interval': {'key': 'interval', 'type': 'int'},
        'pattern': {'key': 'pattern', 'type': 'RecurrencePattern'},
    }

    def __init__(
        self,
        *,
        schedule_status: Optional[Union[str, "MfeInternalScheduleStatus"]] = None,
        schedule_type: Optional[Union[str, "ScheduleType"]] = None,
        end_time: Optional[datetime.datetime] = None,
        start_time: Optional[datetime.datetime] = None,
        time_zone: Optional[str] = None,
        expression: Optional[str] = None,
        frequency: Optional[Union[str, "RecurrenceFrequency"]] = None,
        interval: Optional[int] = None,
        pattern: Optional["RecurrencePattern"] = None,
        **kwargs
    ):
        """
        :keyword schedule_status: Possible values include: "Enabled", "Disabled".
        :paramtype schedule_status: str or ~flow.models.MfeInternalScheduleStatus
        :keyword schedule_type: Possible values include: "Cron", "Recurrence".
        :paramtype schedule_type: str or ~flow.models.ScheduleType
        :keyword end_time:
        :paramtype end_time: ~datetime.datetime
        :keyword start_time:
        :paramtype start_time: ~datetime.datetime
        :keyword time_zone:
        :paramtype time_zone: str
        :keyword expression:
        :paramtype expression: str
        :keyword frequency: Possible values include: "Minute", "Hour", "Day", "Week", "Month".
        :paramtype frequency: str or ~flow.models.RecurrenceFrequency
        :keyword interval:
        :paramtype interval: int
        :keyword pattern:
        :paramtype pattern: ~flow.models.RecurrencePattern
        """
        super(ScheduleBase, self).__init__(**kwargs)
        self.schedule_status = schedule_status
        self.schedule_type = schedule_type
        self.end_time = end_time
        self.start_time = start_time
        self.time_zone = time_zone
        self.expression = expression
        self.frequency = frequency
        self.interval = interval
        self.pattern = pattern


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class SchemaContractsCreatedBy(msrest.serialization.Model):
    """SchemaContractsCreatedBy.

    :ivar user_object_id:
    :vartype user_object_id: str
    :ivar user_tenant_id:
    :vartype user_tenant_id: str
    :ivar user_name:
    :vartype user_name: str
    :ivar user_principal_name:
    :vartype user_principal_name: str
    """

    _attribute_map = {
        'user_object_id': {'key': 'userObjectId', 'type': 'str'},
        'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
        'user_name': {'key': 'userName', 'type': 'str'},
        'user_principal_name': {'key': 'userPrincipalName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        user_object_id: Optional[str] = None,
        user_tenant_id: Optional[str] = None,
        user_name: Optional[str] = None,
        user_principal_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword user_object_id:
        :paramtype user_object_id: str
        :keyword user_tenant_id:
        :paramtype user_tenant_id: str
        :keyword user_name:
        :paramtype user_name: str
        :keyword user_principal_name:
        :paramtype user_principal_name: str
        """
        super(SchemaContractsCreatedBy, self).__init__(**kwargs)
        self.user_object_id = user_object_id
        self.user_tenant_id = user_tenant_id
        self.user_name = user_name
        self.user_principal_name = user_principal_name


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class ScopeCloudConfiguration(msrest.serialization.Model):
    """ScopeCloudConfiguration.

    :ivar input_path_suffixes: This is a dictionary.
    :vartype input_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
    :ivar output_path_suffixes: This is a dictionary.
    :vartype output_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
    :ivar user_alias:
    :vartype user_alias: str
    :ivar tokens:
    :vartype tokens: int
    :ivar auto_token:
    :vartype auto_token: int
    :ivar vcp:
    :vartype vcp: float
    """

    _attribute_map = {
        'input_path_suffixes': {'key': 'inputPathSuffixes', 'type': '{ArgumentAssignment}'},
        'output_path_suffixes': {'key': 'outputPathSuffixes', 'type': '{ArgumentAssignment}'},
        'user_alias': {'key': 'userAlias', 'type': 'str'},
        'tokens': {'key': 'tokens', 'type': 'int'},
        'auto_token': {'key': 'autoToken', 'type': 'int'},
        'vcp': {'key': 'vcp', 'type': 'float'},
    }

    def __init__(
        self,
        *,
        input_path_suffixes: Optional[Dict[str, "ArgumentAssignment"]] = None,
        output_path_suffixes: Optional[Dict[str, "ArgumentAssignment"]] = None,
        user_alias: Optional[str] = None,
        tokens: Optional[int] = None,
        auto_token: Optional[int] = None,
        vcp: Optional[float] = None,
        **kwargs
    ):
        """
        :keyword input_path_suffixes: This is a dictionary.
        :paramtype input_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
        :keyword output_path_suffixes: This is a dictionary.
        :paramtype output_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
        :keyword user_alias:
        :paramtype user_alias: str
        :keyword tokens:
        :paramtype tokens: int
        :keyword auto_token:
        :paramtype auto_token: int
        :keyword vcp:
        :paramtype vcp: float
        """
        super(ScopeCloudConfiguration, self).__init__(**kwargs)
        self.input_path_suffixes = input_path_suffixes
        self.output_path_suffixes = output_path_suffixes
        self.user_alias = user_alias
        self.tokens = tokens
        self.auto_token = auto_token
        self.vcp = vcp


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class Seasonality(msrest.serialization.Model):
    """Seasonality.

    :ivar mode: Possible values include: "Auto", "Custom".
    :vartype mode: str or ~flow.models.SeasonalityMode
    :ivar value:
    :vartype value: int
    """

    _attribute_map = {
        'mode': {'key': 'mode', 'type': 'str'},
        'value': {'key': 'value', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        mode: Optional[Union[str, "SeasonalityMode"]] = None,
        value: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword mode: Possible values include: "Auto", "Custom".
        :paramtype mode: str or ~flow.models.SeasonalityMode
        :keyword value:
        :paramtype value: int
        """
        super(Seasonality, self).__init__(**kwargs)
        self.mode = mode
        self.value = value


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class SecretConfiguration(msrest.serialization.Model):
    """SecretConfiguration.

    :ivar workspace_secret_name:
    :vartype workspace_secret_name: str
    :ivar uri:
    :vartype uri: str
    """

    _attribute_map = {
        'workspace_secret_name': {'key': 'workspace_secret_name', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        workspace_secret_name: Optional[str] = None,
        uri: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword workspace_secret_name:
        :paramtype workspace_secret_name: str
        :keyword uri:
        :paramtype uri: str
        """
        super(SecretConfiguration, self).__init__(**kwargs)
        self.workspace_secret_name = workspace_secret_name
        self.uri = uri


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class SegmentedResult1(msrest.serialization.Model):
    """SegmentedResult1.

    :ivar value:
    :vartype value: list[~flow.models.FlowIndexEntity]
    :ivar continuation_token:
    :vartype continuation_token: str
    :ivar count:
    :vartype count: int
    :ivar next_link:
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[FlowIndexEntity]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'count': {'key': 'count', 'type': 'int'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["FlowIndexEntity"]] = None,
        continuation_token: Optional[str] = None,
        count: Optional[int] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value:
        :paramtype value: list[~flow.models.FlowIndexEntity]
        :keyword continuation_token:
        :paramtype continuation_token: str
        :keyword count:
        :paramtype count: int
        :keyword next_link:
        :paramtype next_link: str
        """
        super(SegmentedResult1, self).__init__(**kwargs)
        self.value = value
        self.continuation_token = continuation_token
        self.count = count
        self.next_link = next_link


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class ServiceLogRequest(msrest.serialization.Model):
    """ServiceLogRequest.

    :ivar log_level: Possible values include: "Trace", "Debug", "Information", "Warning", "Error",
     "Critical", "None".
    :vartype log_level: str or ~flow.models.LogLevel
    :ivar message:
    :vartype message: str
    :ivar timestamp:
    :vartype timestamp: ~datetime.datetime
    """

    _attribute_map = {
        'log_level': {'key': 'logLevel', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        log_level: Optional[Union[str, "LogLevel"]] = None,
        message: Optional[str] = None,
        timestamp: Optional[datetime.datetime] = None,
        **kwargs
    ):
        """
        :keyword log_level: Possible values include: "Trace", "Debug", "Information", "Warning",
         "Error", "Critical", "None".
        :paramtype log_level: str or ~flow.models.LogLevel
        :keyword message:
        :paramtype message: str
        :keyword timestamp:
        :paramtype timestamp: ~datetime.datetime
        """
        super(ServiceLogRequest, self).__init__(**kwargs)
        self.log_level = log_level
        self.message = message
        self.timestamp = timestamp


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class SessionApplication(msrest.serialization.Model):
    """SessionApplication.

    :ivar image:
    :vartype image: str
    :ivar env_vars: Dictionary of :code:`<string>`.
    :vartype env_vars: dict[str, str]
    :ivar python_pip_requirements:
    :vartype python_pip_requirements: list[str]
    :ivar setup_results:
    :vartype setup_results: list[~flow.models.SessionApplicationRunCommandResult]
    """

    _attribute_map = {
        'image': {'key': 'image', 'type': 'str'},
        'env_vars': {'key': 'envVars', 'type': '{str}'},
        'python_pip_requirements': {'key': 'pythonPipRequirements', 'type': '[str]'},
        'setup_results': {'key': 'setupResults', 'type': '[SessionApplicationRunCommandResult]'},
    }

    def __init__(
        self,
        *,
        image: Optional[str] = None,
        env_vars: Optional[Dict[str, str]] = None,
        python_pip_requirements: Optional[List[str]] = None,
        setup_results: Optional[List["SessionApplicationRunCommandResult"]] = None,
        **kwargs
    ):
        """
        :keyword image:
        :paramtype image: str
        :keyword env_vars: Dictionary of :code:`<string>`.
        :paramtype env_vars: dict[str, str]
        :keyword python_pip_requirements:
        :paramtype python_pip_requirements: list[str]
        :keyword setup_results:
        :paramtype setup_results: list[~flow.models.SessionApplicationRunCommandResult]
        """
        super(SessionApplication, self).__init__(**kwargs)
        self.image = image
        self.env_vars = env_vars
        self.python_pip_requirements = python_pip_requirements
        self.setup_results = setup_results


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class SessionApplicationRunCommandResult(msrest.serialization.Model):
    """SessionApplicationRunCommandResult.

    :ivar command:
    :vartype command: str
    :ivar arguments:
    :vartype arguments: list[str]
    :ivar exit_code:
    :vartype exit_code: int
    :ivar std_out:
    :vartype std_out: str
    :ivar std_err:
    :vartype std_err: str
    """

    _attribute_map = {
        'command': {'key': 'command', 'type': 'str'},
        'arguments': {'key': 'arguments', 'type': '[str]'},
        'exit_code': {'key': 'exitCode', 'type': 'int'},
        'std_out': {'key': 'stdOut', 'type': 'str'},
        'std_err': {'key': 'stdErr', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        command: Optional[str] = None,
        arguments: Optional[List[str]] = None,
        exit_code: Optional[int] = None,
        std_out: Optional[str] = None,
        std_err: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword command:
        :paramtype command: str
        :keyword arguments:
        :paramtype arguments: list[str]
        :keyword exit_code:
        :paramtype exit_code: int
        :keyword std_out:
        :paramtype std_out: str
        :keyword std_err:
        :paramtype std_err: str
        """
        super(SessionApplicationRunCommandResult, self).__init__(**kwargs)
        self.command = command
        self.arguments = arguments
        self.exit_code = exit_code
        self.std_out = std_out
        self.std_err = std_err


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class SessionProperties(msrest.serialization.Model):
    """SessionProperties.

    :ivar session_id:
    :vartype session_id: str
    :ivar subscription_id:
    :vartype subscription_id: str
    :ivar resource_group_name:
    :vartype resource_group_name: str
    :ivar workspace_name:
    :vartype workspace_name: str
    :ivar user_object_id:
    :vartype user_object_id: str
    :ivar user_tenant_id:
    :vartype user_tenant_id: str
    :ivar vm_size:
    :vartype vm_size: str
    :ivar max_idle_time_seconds:
    :vartype max_idle_time_seconds: long
    :ivar application:
    :vartype application: ~flow.models.SessionApplication
    :ivar last_alive_time:
    :vartype last_alive_time: ~datetime.datetime
    """

    _attribute_map = {
        'session_id': {'key': 'sessionId', 'type': 'str'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'resource_group_name': {'key': 'resourceGroupName', 'type': 'str'},
        'workspace_name': {'key': 'workspaceName', 'type': 'str'},
        'user_object_id': {'key': 'userObjectId', 'type': 'str'},
        'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
        'application': {'key': 'application', 'type': 'SessionApplication'},
        'last_alive_time': {'key': 'lastAliveTime', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        session_id: Optional[str] = None,
        subscription_id: Optional[str] = None,
        resource_group_name: Optional[str] = None,
        workspace_name: Optional[str] = None,
        user_object_id: Optional[str] = None,
        user_tenant_id: Optional[str] = None,
        vm_size: Optional[str] = None,
        max_idle_time_seconds: Optional[int] = None,
        application: Optional["SessionApplication"] = None,
        last_alive_time: Optional[datetime.datetime] = None,
        **kwargs
    ):
        """
        :keyword session_id:
        :paramtype session_id: str
        :keyword subscription_id:
        :paramtype subscription_id: str
        :keyword resource_group_name:
        :paramtype resource_group_name: str
        :keyword workspace_name:
        :paramtype workspace_name: str
        :keyword user_object_id:
        :paramtype user_object_id: str
        :keyword user_tenant_id:
        :paramtype user_tenant_id: str
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword max_idle_time_seconds:
        :paramtype max_idle_time_seconds: long
        :keyword application:
        :paramtype application: ~flow.models.SessionApplication
        :keyword last_alive_time:
        :paramtype last_alive_time: ~datetime.datetime
        """
        super(SessionProperties, self).__init__(**kwargs)
        self.session_id = session_id
        self.subscription_id = subscription_id
        self.resource_group_name = resource_group_name
        self.workspace_name = workspace_name
        self.user_object_id = user_object_id
        self.user_tenant_id = user_tenant_id
        self.vm_size = vm_size
        self.max_idle_time_seconds = max_idle_time_seconds
        self.application = application
        self.last_alive_time = last_alive_time


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class SetupFlowSessionRequest(msrest.serialization.Model):
    """SetupFlowSessionRequest.

    :ivar action: Possible values include: "Install", "Reset", "Update", "Delete".
    :vartype action: str or ~flow.models.SetupFlowSessionAction
    :ivar vm_size:
    :vartype vm_size: str
    :ivar max_idle_time_seconds:
    :vartype max_idle_time_seconds: long
    :ivar identity:
    :vartype identity: str
    """

    _attribute_map = {
        'action': {'key': 'action', 'type': 'str'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
        'identity': {'key': 'identity', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        action: Optional[Union[str, "SetupFlowSessionAction"]] = None,
        vm_size: Optional[str] = None,
        max_idle_time_seconds: Optional[int] = None,
        identity: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword action: Possible values include: "Install", "Reset", "Update", "Delete".
        :paramtype action: str or ~flow.models.SetupFlowSessionAction
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword max_idle_time_seconds:
        :paramtype max_idle_time_seconds: long
        :keyword identity:
        :paramtype identity: str
        """
        super(SetupFlowSessionRequest, self).__init__(**kwargs)
        self.action = action
        self.vm_size = vm_size
        self.max_idle_time_seconds = max_idle_time_seconds
        self.identity = identity


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class SharingScope(msrest.serialization.Model):
    """SharingScope.

    :ivar type: Possible values include: "Global", "Tenant", "Subscription", "ResourceGroup",
     "Workspace".
    :vartype type: str or ~flow.models.ScopeType
    :ivar identifier:
    :vartype identifier: str
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'identifier': {'key': 'identifier', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        type: Optional[Union[str, "ScopeType"]] = None,
        identifier: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword type: Possible values include: "Global", "Tenant", "Subscription", "ResourceGroup",
         "Workspace".
        :paramtype type: str or ~flow.models.ScopeType
        :keyword identifier:
        :paramtype identifier: str
        """
        super(SharingScope, self).__init__(**kwargs)
        self.type = type
        self.identifier = identifier


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class Snapshot(msrest.serialization.Model):
    """Snapshot.

    :ivar id:
    :vartype id: str
    :ivar directory_name:
    :vartype directory_name: str
    :ivar snapshot_asset_id:
    :vartype snapshot_asset_id: str
    :ivar snapshot_entity_id:
    :vartype snapshot_entity_id: str
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'directory_name': {'key': 'directoryName', 'type': 'str'},
        'snapshot_asset_id': {'key': 'snapshotAssetId', 'type': 'str'},
        'snapshot_entity_id': {'key': 'snapshotEntityId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,
        directory_name: Optional[str] = None,
        snapshot_asset_id: Optional[str] = None,
        snapshot_entity_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id:
        :paramtype id: str
        :keyword directory_name:
        :paramtype directory_name: str
        :keyword snapshot_asset_id:
        :paramtype snapshot_asset_id: str
        :keyword snapshot_entity_id:
        :paramtype snapshot_entity_id: str
        """
        super(Snapshot, self).__init__(**kwargs)
        self.id = id
        self.directory_name = directory_name
        self.snapshot_asset_id = snapshot_asset_id
        self.snapshot_entity_id = snapshot_entity_id


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class SnapshotInfo(msrest.serialization.Model):
    """SnapshotInfo.

    :ivar root_download_url:
    :vartype root_download_url: str
    :ivar snapshots: This is a dictionary.
    :vartype snapshots: dict[str, ~flow.models.DownloadResourceInfo]
    """

    _attribute_map = {
        'root_download_url': {'key': 'rootDownloadUrl', 'type': 'str'},
        'snapshots': {'key': 'snapshots', 'type': '{DownloadResourceInfo}'},
    }

    def __init__(
        self,
        *,
        root_download_url: Optional[str] = None,
        snapshots: Optional[Dict[str, "DownloadResourceInfo"]] = None,
        **kwargs
    ):
        """
        :keyword root_download_url:
        :paramtype root_download_url: str
        :keyword snapshots: This is a dictionary.
        :paramtype snapshots: dict[str, ~flow.models.DownloadResourceInfo]
        """
        super(SnapshotInfo, self).__init__(**kwargs)
        self.root_download_url = root_download_url
        self.snapshots = snapshots


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class SourceCodeDataReference(msrest.serialization.Model):
    """SourceCodeDataReference.

    :ivar data_store_name:
    :vartype data_store_name: str
    :ivar path:
    :vartype path: str
    """

    _attribute_map = {
        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
        'path': {'key': 'path', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        data_store_name: Optional[str] = None,
        path: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword data_store_name:
        :paramtype data_store_name: str
        :keyword path:
        :paramtype path: str
        """
        super(SourceCodeDataReference, self).__init__(**kwargs)
        self.data_store_name = data_store_name
        self.path = path


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class SparkConfiguration(msrest.serialization.Model):
    """SparkConfiguration.

    :ivar configuration: Dictionary of :code:`<string>`.
    :vartype configuration: dict[str, str]
    :ivar files:
    :vartype files: list[str]
    :ivar archives:
    :vartype archives: list[str]
    :ivar jars:
    :vartype jars: list[str]
    :ivar py_files:
    :vartype py_files: list[str]
    :ivar spark_pool_resource_id:
    :vartype spark_pool_resource_id: str
    """

    _attribute_map = {
        'configuration': {'key': 'configuration', 'type': '{str}'},
        'files': {'key': 'files', 'type': '[str]'},
        'archives': {'key': 'archives', 'type': '[str]'},
        'jars': {'key': 'jars', 'type': '[str]'},
        'py_files': {'key': 'pyFiles', 'type': '[str]'},
        'spark_pool_resource_id': {'key': 'sparkPoolResourceId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        configuration: Optional[Dict[str, str]] = None,
        files: Optional[List[str]] = None,
        archives: Optional[List[str]] = None,
        jars: Optional[List[str]] = None,
        py_files: Optional[List[str]] = None,
        spark_pool_resource_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword configuration: Dictionary of :code:`<string>`.
        :paramtype configuration: dict[str, str]
        :keyword files:
        :paramtype files: list[str]
        :keyword archives:
        :paramtype archives: list[str]
        :keyword jars:
        :paramtype jars: list[str]
        :keyword py_files:
        :paramtype py_files: list[str]
        :keyword spark_pool_resource_id:
        :paramtype spark_pool_resource_id: str
        """
        super(SparkConfiguration, self).__init__(**kwargs)
        self.configuration = configuration
        self.files = files
        self.archives = archives
        self.jars = jars
        self.py_files = py_files
        self.spark_pool_resource_id = spark_pool_resource_id


# NOTE: AutoRest-generated model; wire-format JSON keys come from _attribute_map.
class SparkJarTaskDto(msrest.serialization.Model):
    """SparkJarTaskDto.

    :ivar main_class_name:
    :vartype main_class_name: str
    :ivar parameters:
    :vartype parameters: list[str]
    """

    _attribute_map = {
        'main_class_name': {'key': 'main_class_name', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        main_class_name: Optional[str] = None,
        parameters: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword main_class_name:
        :paramtype main_class_name: str
        :keyword parameters:
        :paramtype parameters: list[str]
        """
        super(SparkJarTaskDto, self).__init__(**kwargs)
        self.main_class_name = main_class_name
        self.parameters = parameters


class SparkJob(msrest.serialization.Model):
    """SparkJob.

    :ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
     "AutoML", "Spark", "Base".
    :vartype job_type: str or ~flow.models.JobType
    :ivar resources:
    :vartype resources: ~flow.models.SparkResourceConfiguration
    :ivar args:
    :vartype args: str
    :ivar code_id:
    :vartype code_id: str
    :ivar entry:
    :vartype entry: ~flow.models.SparkJobEntry
    :ivar py_files:
    :vartype py_files: list[str]
    :ivar jars:
    :vartype jars: list[str]
    :ivar files:
    :vartype files: list[str]
    :ivar archives:
    :vartype archives: list[str]
    :ivar environment_id:
    :vartype environment_id: str
    :ivar input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
:vartype input_data_bindings: dict[str, ~flow.models.InputDataBinding] :ivar output_data_bindings: Dictionary of :code:`<OutputDataBinding>`. :vartype output_data_bindings: dict[str, ~flow.models.OutputDataBinding] :ivar conf: Dictionary of :code:`<string>`. :vartype conf: dict[str, str] :ivar environment_variables: Dictionary of :code:`<string>`. :vartype environment_variables: dict[str, str] :ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled", "InProgress". :vartype provisioning_state: str or ~flow.models.JobProvisioningState :ivar parent_job_name: :vartype parent_job_name: str :ivar display_name: :vartype display_name: str :ivar experiment_name: :vartype experiment_name: str :ivar status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". :vartype status: str or ~flow.models.JobStatus :ivar interaction_endpoints: Dictionary of :code:`<JobEndpoint>`. :vartype interaction_endpoints: dict[str, ~flow.models.JobEndpoint] :ivar identity: :vartype identity: ~flow.models.MfeInternalIdentityConfiguration :ivar compute: :vartype compute: ~flow.models.ComputeConfiguration :ivar priority: :vartype priority: int :ivar output: :vartype output: ~flow.models.JobOutputArtifacts :ivar is_archived: :vartype is_archived: bool :ivar schedule: :vartype schedule: ~flow.models.ScheduleBase :ivar component_id: :vartype component_id: str :ivar notification_setting: :vartype notification_setting: ~flow.models.NotificationSetting :ivar secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`. :vartype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration] :ivar description: :vartype description: str :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar properties: This is a dictionary. 
:vartype properties: dict[str, str] """ _attribute_map = { 'job_type': {'key': 'jobType', 'type': 'str'}, 'resources': {'key': 'resources', 'type': 'SparkResourceConfiguration'}, 'args': {'key': 'args', 'type': 'str'}, 'code_id': {'key': 'codeId', 'type': 'str'}, 'entry': {'key': 'entry', 'type': 'SparkJobEntry'}, 'py_files': {'key': 'pyFiles', 'type': '[str]'}, 'jars': {'key': 'jars', 'type': '[str]'}, 'files': {'key': 'files', 'type': '[str]'}, 'archives': {'key': 'archives', 'type': '[str]'}, 'environment_id': {'key': 'environmentId', 'type': 'str'}, 'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'}, 'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'}, 'conf': {'key': 'conf', 'type': '{str}'}, 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, 'parent_job_name': {'key': 'parentJobName', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'experiment_name': {'key': 'experimentName', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'}, 'identity': {'key': 'identity', 'type': 'MfeInternalIdentityConfiguration'}, 'compute': {'key': 'compute', 'type': 'ComputeConfiguration'}, 'priority': {'key': 'priority', 'type': 'int'}, 'output': {'key': 'output', 'type': 'JobOutputArtifacts'}, 'is_archived': {'key': 'isArchived', 'type': 'bool'}, 'schedule': {'key': 'schedule', 'type': 'ScheduleBase'}, 'component_id': {'key': 'componentId', 'type': 'str'}, 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{MfeInternalSecretConfiguration}'}, 'description': {'key': 'description', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'properties': {'key': 'properties', 'type': '{str}'}, } def __init__( self, *, 
job_type: Optional[Union[str, "JobType"]] = None, resources: Optional["SparkResourceConfiguration"] = None, args: Optional[str] = None, code_id: Optional[str] = None, entry: Optional["SparkJobEntry"] = None, py_files: Optional[List[str]] = None, jars: Optional[List[str]] = None, files: Optional[List[str]] = None, archives: Optional[List[str]] = None, environment_id: Optional[str] = None, input_data_bindings: Optional[Dict[str, "InputDataBinding"]] = None, output_data_bindings: Optional[Dict[str, "OutputDataBinding"]] = None, conf: Optional[Dict[str, str]] = None, environment_variables: Optional[Dict[str, str]] = None, provisioning_state: Optional[Union[str, "JobProvisioningState"]] = None, parent_job_name: Optional[str] = None, display_name: Optional[str] = None, experiment_name: Optional[str] = None, status: Optional[Union[str, "JobStatus"]] = None, interaction_endpoints: Optional[Dict[str, "JobEndpoint"]] = None, identity: Optional["MfeInternalIdentityConfiguration"] = None, compute: Optional["ComputeConfiguration"] = None, priority: Optional[int] = None, output: Optional["JobOutputArtifacts"] = None, is_archived: Optional[bool] = None, schedule: Optional["ScheduleBase"] = None, component_id: Optional[str] = None, notification_setting: Optional["NotificationSetting"] = None, secrets_configuration: Optional[Dict[str, "MfeInternalSecretConfiguration"]] = None, description: Optional[str] = None, tags: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, **kwargs ): """ :keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data", "AutoML", "Spark", "Base". 
:paramtype job_type: str or ~flow.models.JobType :keyword resources: :paramtype resources: ~flow.models.SparkResourceConfiguration :keyword args: :paramtype args: str :keyword code_id: :paramtype code_id: str :keyword entry: :paramtype entry: ~flow.models.SparkJobEntry :keyword py_files: :paramtype py_files: list[str] :keyword jars: :paramtype jars: list[str] :keyword files: :paramtype files: list[str] :keyword archives: :paramtype archives: list[str] :keyword environment_id: :paramtype environment_id: str :keyword input_data_bindings: Dictionary of :code:`<InputDataBinding>`. :paramtype input_data_bindings: dict[str, ~flow.models.InputDataBinding] :keyword output_data_bindings: Dictionary of :code:`<OutputDataBinding>`. :paramtype output_data_bindings: dict[str, ~flow.models.OutputDataBinding] :keyword conf: Dictionary of :code:`<string>`. :paramtype conf: dict[str, str] :keyword environment_variables: Dictionary of :code:`<string>`. :paramtype environment_variables: dict[str, str] :keyword provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled", "InProgress". :paramtype provisioning_state: str or ~flow.models.JobProvisioningState :keyword parent_job_name: :paramtype parent_job_name: str :keyword display_name: :paramtype display_name: str :keyword experiment_name: :paramtype experiment_name: str :keyword status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". :paramtype status: str or ~flow.models.JobStatus :keyword interaction_endpoints: Dictionary of :code:`<JobEndpoint>`. 
:paramtype interaction_endpoints: dict[str, ~flow.models.JobEndpoint] :keyword identity: :paramtype identity: ~flow.models.MfeInternalIdentityConfiguration :keyword compute: :paramtype compute: ~flow.models.ComputeConfiguration :keyword priority: :paramtype priority: int :keyword output: :paramtype output: ~flow.models.JobOutputArtifacts :keyword is_archived: :paramtype is_archived: bool :keyword schedule: :paramtype schedule: ~flow.models.ScheduleBase :keyword component_id: :paramtype component_id: str :keyword notification_setting: :paramtype notification_setting: ~flow.models.NotificationSetting :keyword secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`. :paramtype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration] :keyword description: :paramtype description: str :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] """ super(SparkJob, self).__init__(**kwargs) self.job_type = job_type self.resources = resources self.args = args self.code_id = code_id self.entry = entry self.py_files = py_files self.jars = jars self.files = files self.archives = archives self.environment_id = environment_id self.input_data_bindings = input_data_bindings self.output_data_bindings = output_data_bindings self.conf = conf self.environment_variables = environment_variables self.provisioning_state = provisioning_state self.parent_job_name = parent_job_name self.display_name = display_name self.experiment_name = experiment_name self.status = status self.interaction_endpoints = interaction_endpoints self.identity = identity self.compute = compute self.priority = priority self.output = output self.is_archived = is_archived self.schedule = schedule self.component_id = component_id self.notification_setting = notification_setting self.secrets_configuration = secrets_configuration self.description = description self.tags = tags 
self.properties = properties class SparkJobEntry(msrest.serialization.Model): """SparkJobEntry. :ivar file: :vartype file: str :ivar class_name: :vartype class_name: str """ _attribute_map = { 'file': {'key': 'file', 'type': 'str'}, 'class_name': {'key': 'className', 'type': 'str'}, } def __init__( self, *, file: Optional[str] = None, class_name: Optional[str] = None, **kwargs ): """ :keyword file: :paramtype file: str :keyword class_name: :paramtype class_name: str """ super(SparkJobEntry, self).__init__(**kwargs) self.file = file self.class_name = class_name class SparkMavenPackage(msrest.serialization.Model): """SparkMavenPackage. :ivar group: :vartype group: str :ivar artifact: :vartype artifact: str :ivar version: :vartype version: str """ _attribute_map = { 'group': {'key': 'group', 'type': 'str'}, 'artifact': {'key': 'artifact', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, } def __init__( self, *, group: Optional[str] = None, artifact: Optional[str] = None, version: Optional[str] = None, **kwargs ): """ :keyword group: :paramtype group: str :keyword artifact: :paramtype artifact: str :keyword version: :paramtype version: str """ super(SparkMavenPackage, self).__init__(**kwargs) self.group = group self.artifact = artifact self.version = version class SparkPythonTaskDto(msrest.serialization.Model): """SparkPythonTaskDto. 
    :ivar python_file:
    :vartype python_file: str
    :ivar parameters:
    :vartype parameters: list[str]
    """

    # NOTE(review): wire key 'python_file' is snake_case, unlike the camelCase keys on
    # most models in this file — presumably matches the service contract; confirm.
    _attribute_map = {
        'python_file': {'key': 'python_file', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        python_file: Optional[str] = None,
        parameters: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword python_file:
        :paramtype python_file: str
        :keyword parameters:
        :paramtype parameters: list[str]
        """
        super(SparkPythonTaskDto, self).__init__(**kwargs)
        self.python_file = python_file
        self.parameters = parameters


# msrest model for Spark compute resources: VM instance type and Spark runtime version.
class SparkResourceConfiguration(msrest.serialization.Model):
    """SparkResourceConfiguration.

    :ivar instance_type:
    :vartype instance_type: str
    :ivar runtime_version:
    :vartype runtime_version: str
    """

    _attribute_map = {
        'instance_type': {'key': 'instanceType', 'type': 'str'},
        'runtime_version': {'key': 'runtimeVersion', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        instance_type: Optional[str] = None,
        runtime_version: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword instance_type:
        :paramtype instance_type: str
        :keyword runtime_version:
        :paramtype runtime_version: str
        """
        super(SparkResourceConfiguration, self).__init__(**kwargs)
        self.instance_type = instance_type
        self.runtime_version = runtime_version


# msrest model for Spark dependency settings: Maven repositories/packages and a
# pre-cache flag.
class SparkSection(msrest.serialization.Model):
    """SparkSection.
    :ivar repositories:
    :vartype repositories: list[str]
    :ivar packages:
    :vartype packages: list[~flow.models.SparkMavenPackage]
    :ivar precache_packages:
    :vartype precache_packages: bool
    """

    _attribute_map = {
        'repositories': {'key': 'repositories', 'type': '[str]'},
        'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
        'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        repositories: Optional[List[str]] = None,
        packages: Optional[List["SparkMavenPackage"]] = None,
        precache_packages: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword repositories:
        :paramtype repositories: list[str]
        :keyword packages:
        :paramtype packages: list[~flow.models.SparkMavenPackage]
        :keyword precache_packages:
        :paramtype precache_packages: bool
        """
        super(SparkSection, self).__init__(**kwargs)
        self.repositories = repositories
        self.packages = packages
        self.precache_packages = precache_packages


# msrest model for a spark-submit style task: just the parameter list.
class SparkSubmitTaskDto(msrest.serialization.Model):
    """SparkSubmitTaskDto.

    :ivar parameters:
    :vartype parameters: list[str]
    """

    _attribute_map = {
        'parameters': {'key': 'parameters', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        parameters: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword parameters:
        :paramtype parameters: list[str]
        """
        super(SparkSubmitTaskDto, self).__init__(**kwargs)
        self.parameters = parameters


# msrest model addressing SQL data: by table name, ad-hoc query, or stored procedure
# (with its parameters).
class SqlDataPath(msrest.serialization.Model):
    """SqlDataPath.
    :ivar sql_table_name:
    :vartype sql_table_name: str
    :ivar sql_query:
    :vartype sql_query: str
    :ivar sql_stored_procedure_name:
    :vartype sql_stored_procedure_name: str
    :ivar sql_stored_procedure_params:
    :vartype sql_stored_procedure_params: list[~flow.models.StoredProcedureParameter]
    """

    _attribute_map = {
        'sql_table_name': {'key': 'sqlTableName', 'type': 'str'},
        'sql_query': {'key': 'sqlQuery', 'type': 'str'},
        'sql_stored_procedure_name': {'key': 'sqlStoredProcedureName', 'type': 'str'},
        'sql_stored_procedure_params': {'key': 'sqlStoredProcedureParams', 'type': '[StoredProcedureParameter]'},
    }

    def __init__(
        self,
        *,
        sql_table_name: Optional[str] = None,
        sql_query: Optional[str] = None,
        sql_stored_procedure_name: Optional[str] = None,
        sql_stored_procedure_params: Optional[List["StoredProcedureParameter"]] = None,
        **kwargs
    ):
        """
        :keyword sql_table_name:
        :paramtype sql_table_name: str
        :keyword sql_query:
        :paramtype sql_query: str
        :keyword sql_stored_procedure_name:
        :paramtype sql_stored_procedure_name: str
        :keyword sql_stored_procedure_params:
        :paramtype sql_stored_procedure_params: list[~flow.models.StoredProcedureParameter]
        """
        super(SqlDataPath, self).__init__(**kwargs)
        self.sql_table_name = sql_table_name
        self.sql_query = sql_query
        self.sql_stored_procedure_name = sql_stored_procedure_name
        self.sql_stored_procedure_params = sql_stored_procedure_params


# msrest model for AutoML stack-ensemble settings: meta-learner type, training split
# percentage, and free-form meta-learner kwargs.
class StackEnsembleSettings(msrest.serialization.Model):
    """StackEnsembleSettings.

    :ivar stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
     "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
     "LightGBMRegressor", "LinearRegression".
    :vartype stack_meta_learner_type: str or ~flow.models.StackMetaLearnerType
    :ivar stack_meta_learner_train_percentage:
    :vartype stack_meta_learner_train_percentage: float
    :ivar stack_meta_learner_k_wargs: Anything.
    :vartype stack_meta_learner_k_wargs: any
    """

    # 'stackMetaLearnerKWargs' is wire type 'object': an arbitrary, untyped payload.
    _attribute_map = {
        'stack_meta_learner_type': {'key': 'stackMetaLearnerType', 'type': 'str'},
        'stack_meta_learner_train_percentage': {'key': 'stackMetaLearnerTrainPercentage', 'type': 'float'},
        'stack_meta_learner_k_wargs': {'key': 'stackMetaLearnerKWargs', 'type': 'object'},
    }

    def __init__(
        self,
        *,
        stack_meta_learner_type: Optional[Union[str, "StackMetaLearnerType"]] = None,
        stack_meta_learner_train_percentage: Optional[float] = None,
        stack_meta_learner_k_wargs: Optional[Any] = None,
        **kwargs
    ):
        """
        :keyword stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
         "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
         "LightGBMRegressor", "LinearRegression".
        :paramtype stack_meta_learner_type: str or ~flow.models.StackMetaLearnerType
        :keyword stack_meta_learner_train_percentage:
        :paramtype stack_meta_learner_train_percentage: float
        :keyword stack_meta_learner_k_wargs: Anything.
        :paramtype stack_meta_learner_k_wargs: any
        """
        super(StackEnsembleSettings, self).__init__(**kwargs)
        self.stack_meta_learner_type = stack_meta_learner_type
        self.stack_meta_learner_train_percentage = stack_meta_learner_train_percentage
        self.stack_meta_learner_k_wargs = stack_meta_learner_k_wargs


# msrest model describing a standby compute pool: name, instance count, VM size, and
# the per-instance availability statuses.
class StandbyPoolProperties(msrest.serialization.Model):
    """StandbyPoolProperties.
    :ivar name:
    :vartype name: str
    :ivar count:
    :vartype count: int
    :ivar vm_size:
    :vartype vm_size: str
    :ivar standby_available_instances:
    :vartype standby_available_instances: list[~flow.models.StandbyPoolResourceStatus]
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'count': {'key': 'count', 'type': 'int'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'standby_available_instances': {'key': 'standbyAvailableInstances', 'type': '[StandbyPoolResourceStatus]'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        count: Optional[int] = None,
        vm_size: Optional[str] = None,
        standby_available_instances: Optional[List["StandbyPoolResourceStatus"]] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword count:
        :paramtype count: int
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword standby_available_instances:
        :paramtype standby_available_instances: list[~flow.models.StandbyPoolResourceStatus]
        """
        super(StandbyPoolProperties, self).__init__(**kwargs)
        self.name = name
        self.count = count
        self.vm_size = vm_size
        self.standby_available_instances = standby_available_instances


# msrest model for a single standby-pool instance: status string plus optional error.
class StandbyPoolResourceStatus(msrest.serialization.Model):
    """StandbyPoolResourceStatus.

    :ivar status:
    :vartype status: str
    :ivar error:
    :vartype error: ~flow.models.CloudError
    """

    _attribute_map = {
        'status': {'key': 'status', 'type': 'str'},
        'error': {'key': 'error', 'type': 'CloudError'},
    }

    def __init__(
        self,
        *,
        status: Optional[str] = None,
        error: Optional["CloudError"] = None,
        **kwargs
    ):
        """
        :keyword status:
        :paramtype status: str
        :keyword error:
        :paramtype error: ~flow.models.CloudError
        """
        super(StandbyPoolResourceStatus, self).__init__(**kwargs)
        self.status = status
        self.error = error


# msrest model for the result of starting a run. Unlike most models in this file,
# run_id is required (see _validation: required, min_length 1).
class StartRunResult(msrest.serialization.Model):
    """StartRunResult.

    All required parameters must be populated in order to send to Azure.

    :ivar run_id: Required.
    :vartype run_id: str
    """

    # msrest enforces these constraints at serialization time.
    _validation = {
        'run_id': {'required': True, 'min_length': 1},
    }

    _attribute_map = {
        'run_id': {'key': 'runId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        run_id: str,
        **kwargs
    ):
        """
        :keyword run_id: Required.
        :paramtype run_id: str
        """
        super(StartRunResult, self).__init__(**kwargs)
        self.run_id = run_id


# msrest model profiling a single pipeline step run: identity, compute, timing
# (wire type 'long' for create/start/end_time — units not specified here; presumably
# an epoch timestamp, confirm with the service docs), status, and reuse provenance.
class StepRunProfile(msrest.serialization.Model):
    """StepRunProfile.

    :ivar step_run_id:
    :vartype step_run_id: str
    :ivar step_run_number:
    :vartype step_run_number: int
    :ivar run_url:
    :vartype run_url: str
    :ivar compute_target:
    :vartype compute_target: str
    :ivar compute_target_url:
    :vartype compute_target_url: str
    :ivar node_id:
    :vartype node_id: str
    :ivar node_name:
    :vartype node_name: str
    :ivar step_name:
    :vartype step_name: str
    :ivar create_time:
    :vartype create_time: long
    :ivar start_time:
    :vartype start_time: long
    :ivar end_time:
    :vartype end_time: long
    :ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
     "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
     "Failed", "Canceled".
    :vartype status: str or ~flow.models.RunStatus
    :ivar status_detail:
    :vartype status_detail: str
    :ivar is_reused:
    :vartype is_reused: bool
    :ivar reused_pipeline_run_id:
    :vartype reused_pipeline_run_id: str
    :ivar reused_step_run_id:
    :vartype reused_step_run_id: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar status_timeline:
    :vartype status_timeline: list[~flow.models.RunStatusPeriod]
    """

    _attribute_map = {
        'step_run_id': {'key': 'stepRunId', 'type': 'str'},
        'step_run_number': {'key': 'stepRunNumber', 'type': 'int'},
        'run_url': {'key': 'runUrl', 'type': 'str'},
        'compute_target': {'key': 'computeTarget', 'type': 'str'},
        'compute_target_url': {'key': 'computeTargetUrl', 'type': 'str'},
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'node_name': {'key': 'nodeName', 'type': 'str'},
        'step_name': {'key': 'stepName', 'type': 'str'},
        'create_time': {'key': 'createTime', 'type': 'long'},
        'start_time': {'key': 'startTime', 'type': 'long'},
        'end_time': {'key': 'endTime', 'type': 'long'},
        'status': {'key': 'status', 'type': 'str'},
        'status_detail': {'key': 'statusDetail', 'type': 'str'},
        'is_reused': {'key': 'isReused', 'type': 'bool'},
        'reused_pipeline_run_id': {'key': 'reusedPipelineRunId', 'type': 'str'},
        'reused_step_run_id': {'key': 'reusedStepRunId', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'status_timeline': {'key': 'statusTimeline', 'type': '[RunStatusPeriod]'},
    }

    def __init__(
        self,
        *,
        step_run_id: Optional[str] = None,
        step_run_number: Optional[int] = None,
        run_url: Optional[str] = None,
        compute_target: Optional[str] = None,
        compute_target_url: Optional[str] = None,
        node_id: Optional[str] = None,
        node_name: Optional[str] = None,
        step_name: Optional[str] = None,
        create_time: Optional[int] = None,
        start_time: Optional[int] = None,
        end_time: Optional[int] = None,
        status: Optional[Union[str, "RunStatus"]] = None,
        status_detail: Optional[str] = None,
        is_reused: Optional[bool] = None,
        reused_pipeline_run_id: Optional[str] = None,
        reused_step_run_id: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        status_timeline: Optional[List["RunStatusPeriod"]] = None,
        **kwargs
    ):
        """
        :keyword step_run_id:
        :paramtype step_run_id: str
        :keyword step_run_number:
        :paramtype step_run_number: int
        :keyword run_url:
        :paramtype run_url: str
        :keyword compute_target:
        :paramtype compute_target: str
        :keyword compute_target_url:
        :paramtype compute_target_url: str
        :keyword node_id:
        :paramtype node_id: str
        :keyword node_name:
        :paramtype node_name: str
        :keyword step_name:
        :paramtype step_name: str
        :keyword create_time:
        :paramtype create_time: long
        :keyword start_time:
        :paramtype start_time: long
        :keyword end_time:
        :paramtype end_time: long
        :keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
         "Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
         "Failed", "Canceled".
        :paramtype status: str or ~flow.models.RunStatus
        :keyword status_detail:
        :paramtype status_detail: str
        :keyword is_reused:
        :paramtype is_reused: bool
        :keyword reused_pipeline_run_id:
        :paramtype reused_pipeline_run_id: str
        :keyword reused_step_run_id:
        :paramtype reused_step_run_id: str
        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
        :paramtype tags: dict[str, str]
        :keyword status_timeline:
        :paramtype status_timeline: list[~flow.models.RunStatusPeriod]
        """
        super(StepRunProfile, self).__init__(**kwargs)
        self.step_run_id = step_run_id
        self.step_run_number = step_run_number
        self.run_url = run_url
        self.compute_target = compute_target
        self.compute_target_url = compute_target_url
        self.node_id = node_id
        self.node_name = node_name
        self.step_name = step_name
        self.create_time = create_time
        self.start_time = start_time
        self.end_time = end_time
        self.status = status
        self.status_detail = status_detail
        self.is_reused = is_reused
        self.reused_pipeline_run_id = reused_pipeline_run_id
        self.reused_step_run_id = reused_step_run_id
        self.tags = tags
        self.status_timeline = status_timeline


# msrest model for storage access settings: auth mode plus the matching credential
# (connection string or SAS token) and account name.
class StorageInfo(msrest.serialization.Model):
    """StorageInfo.

    :ivar storage_auth_type: Possible values include: "MSI", "ConnectionString", "SAS".
    :vartype storage_auth_type: str or ~flow.models.StorageAuthType
    :ivar connection_string:
    :vartype connection_string: str
    :ivar sas_token:
    :vartype sas_token: str
    :ivar account_name:
    :vartype account_name: str
    """

    _attribute_map = {
        'storage_auth_type': {'key': 'storageAuthType', 'type': 'str'},
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'sas_token': {'key': 'sasToken', 'type': 'str'},
        'account_name': {'key': 'accountName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        storage_auth_type: Optional[Union[str, "StorageAuthType"]] = None,
        connection_string: Optional[str] = None,
        sas_token: Optional[str] = None,
        account_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword storage_auth_type: Possible values include: "MSI", "ConnectionString", "SAS".
        :paramtype storage_auth_type: str or ~flow.models.StorageAuthType
        :keyword connection_string:
        :paramtype connection_string: str
        :keyword sas_token:
        :paramtype sas_token: str
        :keyword account_name:
        :paramtype account_name: str
        """
        super(StorageInfo, self).__init__(**kwargs)
        self.storage_auth_type = storage_auth_type
        self.connection_string = connection_string
        self.sas_token = sas_token
        self.account_name = account_name


# msrest model for one stored-procedure parameter: name, string-encoded value, and
# the declared parameter type.
class StoredProcedureParameter(msrest.serialization.Model):
    """StoredProcedureParameter.

    :ivar name:
    :vartype name: str
    :ivar value:
    :vartype value: str
    :ivar type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
    :vartype type: str or ~flow.models.StoredProcedureParameterType
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        value: Optional[str] = None,
        type: Optional[Union[str, "StoredProcedureParameterType"]] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword value:
        :paramtype value: str
        :keyword type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean",
         "Date".
        :paramtype type: str or ~flow.models.StoredProcedureParameterType
        """
        super(StoredProcedureParameter, self).__init__(**kwargs)
        self.name = name
        self.value = value
        self.type = type


# msrest model mirroring a .NET Stream's metadata. The capability/length fields are
# marked readonly in _validation: the server populates them and they are ignored on
# requests, so __init__ only accepts the writable position/timeout fields.
class Stream(msrest.serialization.Model):
    """Stream.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar can_read:
    :vartype can_read: bool
    :ivar can_write:
    :vartype can_write: bool
    :ivar can_seek:
    :vartype can_seek: bool
    :ivar can_timeout:
    :vartype can_timeout: bool
    :ivar length:
    :vartype length: long
    :ivar position:
    :vartype position: long
    :ivar read_timeout:
    :vartype read_timeout: int
    :ivar write_timeout:
    :vartype write_timeout: int
    """

    _validation = {
        'can_read': {'readonly': True},
        'can_write': {'readonly': True},
        'can_seek': {'readonly': True},
        'can_timeout': {'readonly': True},
        'length': {'readonly': True},
    }

    _attribute_map = {
        'can_read': {'key': 'canRead', 'type': 'bool'},
        'can_write': {'key': 'canWrite', 'type': 'bool'},
        'can_seek': {'key': 'canSeek', 'type': 'bool'},
        'can_timeout': {'key': 'canTimeout', 'type': 'bool'},
        'length': {'key': 'length', 'type': 'long'},
        'position': {'key': 'position', 'type': 'long'},
        'read_timeout': {'key': 'readTimeout', 'type': 'int'},
        'write_timeout': {'key': 'writeTimeout', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        position: Optional[int] = None,
        read_timeout: Optional[int] = None,
        write_timeout: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword position:
        :paramtype position: long
        :keyword read_timeout:
        :paramtype read_timeout: int
        :keyword write_timeout:
        :paramtype write_timeout: int
        """
        super(Stream, self).__init__(**kwargs)
        # Server-populated, read-only fields start as None (see _validation above).
        self.can_read = None
        self.can_write = None
        self.can_seek = None
        self.can_timeout = None
        self.length = None
        self.position = position
        self.read_timeout = read_timeout
        self.write_timeout = write_timeout


# msrest model describing a module's structured interface: command-line pattern plus
# its typed inputs, outputs, control outputs, (metadata) parameters and arguments.
class StructuredInterface(msrest.serialization.Model):
    """StructuredInterface.
    :ivar command_line_pattern:
    :vartype command_line_pattern: str
    :ivar inputs:
    :vartype inputs: list[~flow.models.StructuredInterfaceInput]
    :ivar outputs:
    :vartype outputs: list[~flow.models.StructuredInterfaceOutput]
    :ivar control_outputs:
    :vartype control_outputs: list[~flow.models.ControlOutput]
    :ivar parameters:
    :vartype parameters: list[~flow.models.StructuredInterfaceParameter]
    :ivar metadata_parameters:
    :vartype metadata_parameters: list[~flow.models.StructuredInterfaceParameter]
    :ivar arguments:
    :vartype arguments: list[~flow.models.ArgumentAssignment]
    """

    _attribute_map = {
        'command_line_pattern': {'key': 'commandLinePattern', 'type': 'str'},
        'inputs': {'key': 'inputs', 'type': '[StructuredInterfaceInput]'},
        'outputs': {'key': 'outputs', 'type': '[StructuredInterfaceOutput]'},
        'control_outputs': {'key': 'controlOutputs', 'type': '[ControlOutput]'},
        'parameters': {'key': 'parameters', 'type': '[StructuredInterfaceParameter]'},
        'metadata_parameters': {'key': 'metadataParameters', 'type': '[StructuredInterfaceParameter]'},
        'arguments': {'key': 'arguments', 'type': '[ArgumentAssignment]'},
    }

    def __init__(
        self,
        *,
        command_line_pattern: Optional[str] = None,
        inputs: Optional[List["StructuredInterfaceInput"]] = None,
        outputs: Optional[List["StructuredInterfaceOutput"]] = None,
        control_outputs: Optional[List["ControlOutput"]] = None,
        parameters: Optional[List["StructuredInterfaceParameter"]] = None,
        metadata_parameters: Optional[List["StructuredInterfaceParameter"]] = None,
        arguments: Optional[List["ArgumentAssignment"]] = None,
        **kwargs
    ):
        """
        :keyword command_line_pattern:
        :paramtype command_line_pattern: str
        :keyword inputs:
        :paramtype inputs: list[~flow.models.StructuredInterfaceInput]
        :keyword outputs:
        :paramtype outputs: list[~flow.models.StructuredInterfaceOutput]
        :keyword control_outputs:
        :paramtype control_outputs: list[~flow.models.ControlOutput]
        :keyword parameters:
        :paramtype parameters: list[~flow.models.StructuredInterfaceParameter]
        :keyword metadata_parameters:
        :paramtype metadata_parameters: list[~flow.models.StructuredInterfaceParameter]
        :keyword arguments:
        :paramtype arguments: list[~flow.models.ArgumentAssignment]
        """
        super(StructuredInterface, self).__init__(**kwargs)
        self.command_line_pattern = command_line_pattern
        self.inputs = inputs
        self.outputs = outputs
        self.control_outputs = control_outputs
        self.parameters = parameters
        self.metadata_parameters = metadata_parameters
        self.arguments = arguments


# msrest model for one input port of a structured interface: typing, optionality,
# data-store access mode, and the accepted dataset types (validated unique).
class StructuredInterfaceInput(msrest.serialization.Model):
    """StructuredInterfaceInput.

    :ivar name:
    :vartype name: str
    :ivar label:
    :vartype label: str
    :ivar data_type_ids_list:
    :vartype data_type_ids_list: list[str]
    :ivar is_optional:
    :vartype is_optional: bool
    :ivar description:
    :vartype description: str
    :ivar skip_processing:
    :vartype skip_processing: bool
    :ivar is_resource:
    :vartype is_resource: bool
    :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
     "Direct", "Hdfs", "Link".
    :vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
    :ivar path_on_compute:
    :vartype path_on_compute: str
    :ivar overwrite:
    :vartype overwrite: bool
    :ivar data_reference_name:
    :vartype data_reference_name: str
    :ivar dataset_types:
    :vartype dataset_types: list[str or ~flow.models.DatasetType]
    """

    # dataset_types entries must be unique; msrest enforces this at serialization time.
    _validation = {
        'dataset_types': {'unique': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'data_type_ids_list': {'key': 'dataTypeIdsList', 'type': '[str]'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
        'description': {'key': 'description', 'type': 'str'},
        'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
        'is_resource': {'key': 'isResource', 'type': 'bool'},
        'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
        'overwrite': {'key': 'overwrite', 'type': 'bool'},
        'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
        'dataset_types': {'key': 'datasetTypes', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        label: Optional[str] = None,
        data_type_ids_list: Optional[List[str]] = None,
        is_optional: Optional[bool] = None,
        description: Optional[str] = None,
        skip_processing: Optional[bool] = None,
        is_resource: Optional[bool] = None,
        data_store_mode: Optional[Union[str, "AEVADataStoreMode"]] = None,
        path_on_compute: Optional[str] = None,
        overwrite: Optional[bool] = None,
        data_reference_name: Optional[str] = None,
        dataset_types: Optional[List[Union[str, "DatasetType"]]] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword label:
        :paramtype label: str
        :keyword data_type_ids_list:
        :paramtype data_type_ids_list: list[str]
        :keyword is_optional:
        :paramtype is_optional: bool
        :keyword description:
        :paramtype description: str
        :keyword skip_processing:
        :paramtype skip_processing: bool
        :keyword is_resource:
        :paramtype is_resource: bool
        :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
         "Direct", "Hdfs", "Link".
        :paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword overwrite:
        :paramtype overwrite: bool
        :keyword data_reference_name:
        :paramtype data_reference_name: str
        :keyword dataset_types:
        :paramtype dataset_types: list[str or ~flow.models.DatasetType]
        """
        super(StructuredInterfaceInput, self).__init__(**kwargs)
        self.name = name
        self.label = label
        self.data_type_ids_list = data_type_ids_list
        self.is_optional = is_optional
        self.description = description
        self.skip_processing = skip_processing
        self.is_resource = is_resource
        self.data_store_mode = data_store_mode
        self.path_on_compute = path_on_compute
        self.overwrite = overwrite
        self.data_reference_name = data_reference_name
        self.dataset_types = dataset_types


# msrest model for one output port of a structured interface: typing, data-store
# placement/mode, and optional training/dataset/asset output settings.
class StructuredInterfaceOutput(msrest.serialization.Model):
    """StructuredInterfaceOutput.
:ivar name: :vartype name: str :ivar label: :vartype label: str :ivar data_type_id: :vartype data_type_id: str :ivar pass_through_data_type_input_name: :vartype pass_through_data_type_input_name: str :ivar description: :vartype description: str :ivar skip_processing: :vartype skip_processing: bool :ivar is_artifact: :vartype is_artifact: bool :ivar data_store_name: :vartype data_store_name: str :ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". :vartype data_store_mode: str or ~flow.models.AEVADataStoreMode :ivar path_on_compute: :vartype path_on_compute: str :ivar overwrite: :vartype overwrite: bool :ivar data_reference_name: :vartype data_reference_name: str :ivar training_output: :vartype training_output: ~flow.models.TrainingOutput :ivar dataset_output: :vartype dataset_output: ~flow.models.DatasetOutput :ivar asset_output_settings: :vartype asset_output_settings: ~flow.models.AssetOutputSettings :ivar early_available: :vartype early_available: bool """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'label': {'key': 'label', 'type': 'str'}, 'data_type_id': {'key': 'dataTypeId', 'type': 'str'}, 'pass_through_data_type_input_name': {'key': 'passThroughDataTypeInputName', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'skip_processing': {'key': 'skipProcessing', 'type': 'bool'}, 'is_artifact': {'key': 'IsArtifact', 'type': 'bool'}, 'data_store_name': {'key': 'dataStoreName', 'type': 'str'}, 'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'}, 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, 'overwrite': {'key': 'overwrite', 'type': 'bool'}, 'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'}, 'training_output': {'key': 'trainingOutput', 'type': 'TrainingOutput'}, 'dataset_output': {'key': 'datasetOutput', 'type': 'DatasetOutput'}, 'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AssetOutputSettings'}, 'early_available': 
{'key': 'EarlyAvailable', 'type': 'bool'}, } def __init__( self, *, name: Optional[str] = None, label: Optional[str] = None, data_type_id: Optional[str] = None, pass_through_data_type_input_name: Optional[str] = None, description: Optional[str] = None, skip_processing: Optional[bool] = None, is_artifact: Optional[bool] = None, data_store_name: Optional[str] = None, data_store_mode: Optional[Union[str, "AEVADataStoreMode"]] = None, path_on_compute: Optional[str] = None, overwrite: Optional[bool] = None, data_reference_name: Optional[str] = None, training_output: Optional["TrainingOutput"] = None, dataset_output: Optional["DatasetOutput"] = None, asset_output_settings: Optional["AssetOutputSettings"] = None, early_available: Optional[bool] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword label: :paramtype label: str :keyword data_type_id: :paramtype data_type_id: str :keyword pass_through_data_type_input_name: :paramtype pass_through_data_type_input_name: str :keyword description: :paramtype description: str :keyword skip_processing: :paramtype skip_processing: bool :keyword is_artifact: :paramtype is_artifact: bool :keyword data_store_name: :paramtype data_store_name: str :keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload", "Direct", "Hdfs", "Link". 
        :paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
        :keyword path_on_compute:
        :paramtype path_on_compute: str
        :keyword overwrite:
        :paramtype overwrite: bool
        :keyword data_reference_name:
        :paramtype data_reference_name: str
        :keyword training_output:
        :paramtype training_output: ~flow.models.TrainingOutput
        :keyword dataset_output:
        :paramtype dataset_output: ~flow.models.DatasetOutput
        :keyword asset_output_settings:
        :paramtype asset_output_settings: ~flow.models.AssetOutputSettings
        :keyword early_available:
        :paramtype early_available: bool
        """
        super(StructuredInterfaceOutput, self).__init__(**kwargs)
        self.name = name
        self.label = label
        self.data_type_id = data_type_id
        self.pass_through_data_type_input_name = pass_through_data_type_input_name
        self.description = description
        self.skip_processing = skip_processing
        self.is_artifact = is_artifact
        self.data_store_name = data_store_name
        self.data_store_mode = data_store_mode
        self.path_on_compute = path_on_compute
        self.overwrite = overwrite
        self.data_reference_name = data_reference_name
        self.training_output = training_output
        self.dataset_output = dataset_output
        self.asset_output_settings = asset_output_settings
        self.early_available = early_available


class StructuredInterfaceParameter(msrest.serialization.Model):
    """StructuredInterfaceParameter.

    :ivar name:
    :vartype name: str
    :ivar label:
    :vartype label: str
    :ivar parameter_type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
    :vartype parameter_type: str or ~flow.models.ParameterType
    :ivar is_optional:
    :vartype is_optional: bool
    :ivar default_value:
    :vartype default_value: str
    :ivar lower_bound:
    :vartype lower_bound: str
    :ivar upper_bound:
    :vartype upper_bound: str
    :ivar enum_values:
    :vartype enum_values: list[str]
    :ivar enum_values_to_argument_strings: This is a dictionary.
    :vartype enum_values_to_argument_strings: dict[str, str]
    :ivar description:
    :vartype description: str
    :ivar set_environment_variable:
    :vartype set_environment_variable: bool
    :ivar environment_variable_override:
    :vartype environment_variable_override: str
    :ivar enabled_by_parameter_name:
    :vartype enabled_by_parameter_name: str
    :ivar enabled_by_parameter_values:
    :vartype enabled_by_parameter_values: list[str]
    :ivar ui_hint:
    :vartype ui_hint: ~flow.models.UIParameterHint
    :ivar group_names:
    :vartype group_names: list[str]
    :ivar argument_name:
    :vartype argument_name: str
    """

    # Auto-generated msrest map: Python attribute -> REST wire key and msrest type token
    # ('{str}' denotes a dict of strings, '[str]' a list of strings).
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'parameter_type': {'key': 'parameterType', 'type': 'str'},
        'is_optional': {'key': 'isOptional', 'type': 'bool'},
        'default_value': {'key': 'defaultValue', 'type': 'str'},
        'lower_bound': {'key': 'lowerBound', 'type': 'str'},
        'upper_bound': {'key': 'upperBound', 'type': 'str'},
        'enum_values': {'key': 'enumValues', 'type': '[str]'},
        'enum_values_to_argument_strings': {'key': 'enumValuesToArgumentStrings', 'type': '{str}'},
        'description': {'key': 'description', 'type': 'str'},
        'set_environment_variable': {'key': 'setEnvironmentVariable', 'type': 'bool'},
        'environment_variable_override': {'key': 'environmentVariableOverride', 'type': 'str'},
        'enabled_by_parameter_name': {'key': 'enabledByParameterName', 'type': 'str'},
        'enabled_by_parameter_values': {'key': 'enabledByParameterValues', 'type': '[str]'},
        'ui_hint': {'key': 'uiHint', 'type': 'UIParameterHint'},
        'group_names': {'key': 'groupNames', 'type': '[str]'},
        'argument_name': {'key': 'argumentName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        label: Optional[str] = None,
        parameter_type: Optional[Union[str, "ParameterType"]] = None,
        is_optional: Optional[bool] = None,
        default_value: Optional[str] = None,
        lower_bound: Optional[str] = None,
        upper_bound: Optional[str] = None,
        enum_values: Optional[List[str]] = None,
        enum_values_to_argument_strings: Optional[Dict[str, str]] = None,
        description: Optional[str] = None,
        set_environment_variable: Optional[bool] = None,
        environment_variable_override: Optional[str] = None,
        enabled_by_parameter_name: Optional[str] = None,
        enabled_by_parameter_values: Optional[List[str]] = None,
        ui_hint: Optional["UIParameterHint"] = None,
        group_names: Optional[List[str]] = None,
        argument_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword label:
        :paramtype label: str
        :keyword parameter_type: Possible values include: "Int", "Double", "Bool", "String",
         "Undefined".
        :paramtype parameter_type: str or ~flow.models.ParameterType
        :keyword is_optional:
        :paramtype is_optional: bool
        :keyword default_value:
        :paramtype default_value: str
        :keyword lower_bound:
        :paramtype lower_bound: str
        :keyword upper_bound:
        :paramtype upper_bound: str
        :keyword enum_values:
        :paramtype enum_values: list[str]
        :keyword enum_values_to_argument_strings: This is a dictionary.
        :paramtype enum_values_to_argument_strings: dict[str, str]
        :keyword description:
        :paramtype description: str
        :keyword set_environment_variable:
        :paramtype set_environment_variable: bool
        :keyword environment_variable_override:
        :paramtype environment_variable_override: str
        :keyword enabled_by_parameter_name:
        :paramtype enabled_by_parameter_name: str
        :keyword enabled_by_parameter_values:
        :paramtype enabled_by_parameter_values: list[str]
        :keyword ui_hint:
        :paramtype ui_hint: ~flow.models.UIParameterHint
        :keyword group_names:
        :paramtype group_names: list[str]
        :keyword argument_name:
        :paramtype argument_name: str
        """
        super(StructuredInterfaceParameter, self).__init__(**kwargs)
        self.name = name
        self.label = label
        self.parameter_type = parameter_type
        self.is_optional = is_optional
        self.default_value = default_value
        self.lower_bound = lower_bound
        self.upper_bound = upper_bound
        self.enum_values = enum_values
        self.enum_values_to_argument_strings = enum_values_to_argument_strings
        self.description = description
        self.set_environment_variable = set_environment_variable
        self.environment_variable_override = environment_variable_override
        self.enabled_by_parameter_name = enabled_by_parameter_name
        self.enabled_by_parameter_values = enabled_by_parameter_values
        self.ui_hint = ui_hint
        self.group_names = group_names
        self.argument_name = argument_name


class StudioMigrationInfo(msrest.serialization.Model):
    """StudioMigrationInfo.

    Variables are only populated by the server, and will be ignored when sending a request.
    :ivar source_workspace_id:
    :vartype source_workspace_id: str
    :ivar source_experiment_id:
    :vartype source_experiment_id: str
    :ivar source_experiment_link:
    :vartype source_experiment_link: str
    :ivar failed_node_id_list:
    :vartype failed_node_id_list: list[str]
    :ivar error_message:
    :vartype error_message: str
    """

    # error_message is read-only: the service populates it; it is not accepted as
    # an __init__ keyword and is initialized to None below.
    _validation = {
        'error_message': {'readonly': True},
    }

    _attribute_map = {
        'source_workspace_id': {'key': 'sourceWorkspaceId', 'type': 'str'},
        'source_experiment_id': {'key': 'sourceExperimentId', 'type': 'str'},
        'source_experiment_link': {'key': 'sourceExperimentLink', 'type': 'str'},
        'failed_node_id_list': {'key': 'failedNodeIdList', 'type': '[str]'},
        'error_message': {'key': 'errorMessage', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        source_workspace_id: Optional[str] = None,
        source_experiment_id: Optional[str] = None,
        source_experiment_link: Optional[str] = None,
        failed_node_id_list: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword source_workspace_id:
        :paramtype source_workspace_id: str
        :keyword source_experiment_id:
        :paramtype source_experiment_id: str
        :keyword source_experiment_link:
        :paramtype source_experiment_link: str
        :keyword failed_node_id_list:
        :paramtype failed_node_id_list: list[str]
        """
        super(StudioMigrationInfo, self).__init__(**kwargs)
        self.source_workspace_id = source_workspace_id
        self.source_experiment_id = source_experiment_id
        self.source_experiment_link = source_experiment_link
        self.failed_node_id_list = failed_node_id_list
        # Server-populated (readonly per _validation); never sent by the client.
        self.error_message = None


class SubGraphConcatenateAssignment(msrest.serialization.Model):
    """SubGraphConcatenateAssignment.

    :ivar concatenate_parameter:
    :vartype concatenate_parameter: list[~flow.models.ParameterAssignment]
    :ivar parameter_assignments:
    :vartype parameter_assignments: ~flow.models.SubPipelineParameterAssignment
    """

    # Auto-generated msrest map: Python attribute -> REST wire key and msrest type token.
    _attribute_map = {
        'concatenate_parameter': {'key': 'concatenateParameter', 'type': '[ParameterAssignment]'},
        'parameter_assignments': {'key': 'parameterAssignments', 'type': 'SubPipelineParameterAssignment'},
    }

    def __init__(
        self,
        *,
        concatenate_parameter: Optional[List["ParameterAssignment"]] = None,
        parameter_assignments: Optional["SubPipelineParameterAssignment"] = None,
        **kwargs
    ):
        """
        :keyword concatenate_parameter:
        :paramtype concatenate_parameter: list[~flow.models.ParameterAssignment]
        :keyword parameter_assignments:
        :paramtype parameter_assignments: ~flow.models.SubPipelineParameterAssignment
        """
        super(SubGraphConcatenateAssignment, self).__init__(**kwargs)
        self.concatenate_parameter = concatenate_parameter
        self.parameter_assignments = parameter_assignments


class SubGraphConfiguration(msrest.serialization.Model):
    """SubGraphConfiguration.

    :ivar graph_id:
    :vartype graph_id: str
    :ivar graph_draft_id:
    :vartype graph_draft_id: str
    :ivar default_cloud_priority:
    :vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
    :ivar is_dynamic:
    :vartype is_dynamic: bool
    """

    # NOTE(review): wire keys 'DefaultCloudPriority'/'IsDynamic' are PascalCase while most keys
    # in this file are camelCase — presumably this mirrors the service's swagger contract;
    # confirm against the service definition before normalizing.
    _attribute_map = {
        'graph_id': {'key': 'graphId', 'type': 'str'},
        'graph_draft_id': {'key': 'graphDraftId', 'type': 'str'},
        'default_cloud_priority': {'key': 'DefaultCloudPriority', 'type': 'CloudPrioritySetting'},
        'is_dynamic': {'key': 'IsDynamic', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        graph_id: Optional[str] = None,
        graph_draft_id: Optional[str] = None,
        default_cloud_priority: Optional["CloudPrioritySetting"] = None,
        is_dynamic: Optional[bool] = False,  # defaults to False, not None, per the generated contract
        **kwargs
    ):
        """
        :keyword graph_id:
        :paramtype graph_id: str
        :keyword graph_draft_id:
        :paramtype graph_draft_id: str
        :keyword default_cloud_priority:
        :paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
        :keyword is_dynamic:
        :paramtype is_dynamic: bool
        """
        super(SubGraphConfiguration, self).__init__(**kwargs)
        self.graph_id = graph_id
        self.graph_draft_id = graph_draft_id
        self.default_cloud_priority = default_cloud_priority
        self.is_dynamic = is_dynamic


class SubGraphConnectionInfo(msrest.serialization.Model):
    """SubGraphConnectionInfo.

    :ivar node_id:
    :vartype node_id: str
    :ivar port_name:
    :vartype port_name: str
    """

    _attribute_map = {
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'port_name': {'key': 'portName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        node_id: Optional[str] = None,
        port_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword node_id:
        :paramtype node_id: str
        :keyword port_name:
        :paramtype port_name: str
        """
        super(SubGraphConnectionInfo, self).__init__(**kwargs)
        self.node_id = node_id
        self.port_name = port_name


class SubGraphDataPathParameterAssignment(msrest.serialization.Model):
    """SubGraphDataPathParameterAssignment.
    :ivar data_set_path_parameter:
    :vartype data_set_path_parameter: ~flow.models.DataSetPathParameter
    :ivar data_set_path_parameter_assignments:
    :vartype data_set_path_parameter_assignments: list[str]
    """

    # Auto-generated msrest map: Python attribute -> REST wire key and msrest type token.
    _attribute_map = {
        'data_set_path_parameter': {'key': 'dataSetPathParameter', 'type': 'DataSetPathParameter'},
        'data_set_path_parameter_assignments': {'key': 'dataSetPathParameterAssignments', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        data_set_path_parameter: Optional["DataSetPathParameter"] = None,
        data_set_path_parameter_assignments: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword data_set_path_parameter:
        :paramtype data_set_path_parameter: ~flow.models.DataSetPathParameter
        :keyword data_set_path_parameter_assignments:
        :paramtype data_set_path_parameter_assignments: list[str]
        """
        super(SubGraphDataPathParameterAssignment, self).__init__(**kwargs)
        self.data_set_path_parameter = data_set_path_parameter
        self.data_set_path_parameter_assignments = data_set_path_parameter_assignments


class SubGraphInfo(msrest.serialization.Model):
    """SubGraphInfo.

    :ivar name:
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar default_compute_target:
    :vartype default_compute_target: ~flow.models.ComputeSetting
    :ivar default_data_store:
    :vartype default_data_store: ~flow.models.DatastoreSetting
    :ivar id:
    :vartype id: str
    :ivar parent_graph_id:
    :vartype parent_graph_id: str
    :ivar pipeline_definition_id:
    :vartype pipeline_definition_id: str
    :ivar sub_graph_parameter_assignment:
    :vartype sub_graph_parameter_assignment: list[~flow.models.SubGraphParameterAssignment]
    :ivar sub_graph_concatenate_assignment:
    :vartype sub_graph_concatenate_assignment: list[~flow.models.SubGraphConcatenateAssignment]
    :ivar sub_graph_data_path_parameter_assignment:
    :vartype sub_graph_data_path_parameter_assignment:
     list[~flow.models.SubGraphDataPathParameterAssignment]
    :ivar sub_graph_default_compute_target_nodes:
    :vartype sub_graph_default_compute_target_nodes: list[str]
    :ivar sub_graph_default_data_store_nodes:
    :vartype sub_graph_default_data_store_nodes: list[str]
    :ivar inputs:
    :vartype inputs: list[~flow.models.SubGraphPortInfo]
    :ivar outputs:
    :vartype outputs: list[~flow.models.SubGraphPortInfo]
    """

    # Auto-generated msrest map: Python attribute -> REST wire key and msrest type token.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'default_compute_target': {'key': 'defaultComputeTarget', 'type': 'ComputeSetting'},
        'default_data_store': {'key': 'defaultDataStore', 'type': 'DatastoreSetting'},
        'id': {'key': 'id', 'type': 'str'},
        'parent_graph_id': {'key': 'parentGraphId', 'type': 'str'},
        'pipeline_definition_id': {'key': 'pipelineDefinitionId', 'type': 'str'},
        'sub_graph_parameter_assignment': {'key': 'subGraphParameterAssignment', 'type': '[SubGraphParameterAssignment]'},
        'sub_graph_concatenate_assignment': {'key': 'subGraphConcatenateAssignment', 'type': '[SubGraphConcatenateAssignment]'},
        'sub_graph_data_path_parameter_assignment': {'key': 'subGraphDataPathParameterAssignment', 'type': '[SubGraphDataPathParameterAssignment]'},
        'sub_graph_default_compute_target_nodes': {'key': 'subGraphDefaultComputeTargetNodes', 'type': '[str]'},
        'sub_graph_default_data_store_nodes': {'key': 'subGraphDefaultDataStoreNodes', 'type': '[str]'},
        'inputs': {'key': 'inputs', 'type': '[SubGraphPortInfo]'},
        'outputs': {'key': 'outputs', 'type': '[SubGraphPortInfo]'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        description: Optional[str] = None,
        default_compute_target: Optional["ComputeSetting"] = None,
        default_data_store: Optional["DatastoreSetting"] = None,
        id: Optional[str] = None,
        parent_graph_id: Optional[str] = None,
        pipeline_definition_id: Optional[str] = None,
        sub_graph_parameter_assignment: Optional[List["SubGraphParameterAssignment"]] = None,
        sub_graph_concatenate_assignment: Optional[List["SubGraphConcatenateAssignment"]] = None,
        sub_graph_data_path_parameter_assignment: Optional[List["SubGraphDataPathParameterAssignment"]] = None,
        sub_graph_default_compute_target_nodes: Optional[List[str]] = None,
        sub_graph_default_data_store_nodes: Optional[List[str]] = None,
        inputs: Optional[List["SubGraphPortInfo"]] = None,
        outputs: Optional[List["SubGraphPortInfo"]] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword description:
        :paramtype description: str
        :keyword default_compute_target:
        :paramtype default_compute_target: ~flow.models.ComputeSetting
        :keyword default_data_store:
        :paramtype default_data_store: ~flow.models.DatastoreSetting
        :keyword id:
        :paramtype id: str
        :keyword parent_graph_id:
        :paramtype parent_graph_id: str
        :keyword pipeline_definition_id:
        :paramtype pipeline_definition_id: str
        :keyword sub_graph_parameter_assignment:
        :paramtype sub_graph_parameter_assignment: list[~flow.models.SubGraphParameterAssignment]
        :keyword sub_graph_concatenate_assignment:
        :paramtype sub_graph_concatenate_assignment:
         list[~flow.models.SubGraphConcatenateAssignment]
        :keyword sub_graph_data_path_parameter_assignment:
        :paramtype sub_graph_data_path_parameter_assignment:
         list[~flow.models.SubGraphDataPathParameterAssignment]
        :keyword sub_graph_default_compute_target_nodes:
        :paramtype sub_graph_default_compute_target_nodes: list[str]
        :keyword sub_graph_default_data_store_nodes:
        :paramtype sub_graph_default_data_store_nodes: list[str]
        :keyword inputs:
        :paramtype inputs: list[~flow.models.SubGraphPortInfo]
        :keyword outputs:
        :paramtype outputs: list[~flow.models.SubGraphPortInfo]
        """
        super(SubGraphInfo, self).__init__(**kwargs)
        self.name = name
        self.description = description
        self.default_compute_target = default_compute_target
        self.default_data_store = default_data_store
        self.id = id
        self.parent_graph_id = parent_graph_id
        self.pipeline_definition_id = pipeline_definition_id
        self.sub_graph_parameter_assignment = sub_graph_parameter_assignment
        self.sub_graph_concatenate_assignment = sub_graph_concatenate_assignment
        self.sub_graph_data_path_parameter_assignment = sub_graph_data_path_parameter_assignment
        self.sub_graph_default_compute_target_nodes = sub_graph_default_compute_target_nodes
        self.sub_graph_default_data_store_nodes = sub_graph_default_data_store_nodes
        self.inputs = inputs
        self.outputs = outputs


class SubGraphParameterAssignment(msrest.serialization.Model):
    """SubGraphParameterAssignment.
    :ivar parameter:
    :vartype parameter: ~flow.models.Parameter
    :ivar parameter_assignments:
    :vartype parameter_assignments: list[~flow.models.SubPipelineParameterAssignment]
    """

    # Auto-generated msrest map: Python attribute -> REST wire key and msrest type token.
    _attribute_map = {
        'parameter': {'key': 'parameter', 'type': 'Parameter'},
        'parameter_assignments': {'key': 'parameterAssignments', 'type': '[SubPipelineParameterAssignment]'},
    }

    def __init__(
        self,
        *,
        parameter: Optional["Parameter"] = None,
        parameter_assignments: Optional[List["SubPipelineParameterAssignment"]] = None,
        **kwargs
    ):
        """
        :keyword parameter:
        :paramtype parameter: ~flow.models.Parameter
        :keyword parameter_assignments:
        :paramtype parameter_assignments: list[~flow.models.SubPipelineParameterAssignment]
        """
        super(SubGraphParameterAssignment, self).__init__(**kwargs)
        self.parameter = parameter
        self.parameter_assignments = parameter_assignments


class SubGraphPortInfo(msrest.serialization.Model):
    """SubGraphPortInfo.

    :ivar name:
    :vartype name: str
    :ivar internal:
    :vartype internal: list[~flow.models.SubGraphConnectionInfo]
    :ivar external:
    :vartype external: list[~flow.models.SubGraphConnectionInfo]
    """

    # NOTE(review): 'internal' vs 'external' presumably distinguish connections inside the
    # subgraph from those crossing its boundary — not stated in the swagger docstring; confirm.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'internal': {'key': 'internal', 'type': '[SubGraphConnectionInfo]'},
        'external': {'key': 'external', 'type': '[SubGraphConnectionInfo]'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        internal: Optional[List["SubGraphConnectionInfo"]] = None,
        external: Optional[List["SubGraphConnectionInfo"]] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword internal:
        :paramtype internal: list[~flow.models.SubGraphConnectionInfo]
        :keyword external:
        :paramtype external: list[~flow.models.SubGraphConnectionInfo]
        """
        super(SubGraphPortInfo, self).__init__(**kwargs)
        self.name = name
        self.internal = internal
        self.external = external


class SubmitBulkRunRequest(msrest.serialization.Model):
    """SubmitBulkRunRequest.

    :ivar flow_definition_file_path:
    :vartype flow_definition_file_path: str
    :ivar flow_definition_resource_id:
    :vartype flow_definition_resource_id: str
    :ivar flow_definition_data_store_name:
    :vartype flow_definition_data_store_name: str
    :ivar flow_definition_blob_path:
    :vartype flow_definition_blob_path: str
    :ivar flow_definition_data_uri:
    :vartype flow_definition_data_uri: str
    :ivar run_id:
    :vartype run_id: str
    :ivar run_display_name:
    :vartype run_display_name: str
    :ivar run_experiment_name:
    :vartype run_experiment_name: str
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. This is a dictionary.
    :vartype tags: dict[str, str]
    :ivar properties: This is a dictionary.
    :vartype properties: dict[str, str]
    :ivar node_variant:
    :vartype node_variant: str
    :ivar variant_run_id:
    :vartype variant_run_id: str
    :ivar baseline_run_id:
    :vartype baseline_run_id: str
    :ivar batch_data_input:
    :vartype batch_data_input: ~flow.models.BatchDataInput
    :ivar inputs_mapping: This is a dictionary.
    :vartype inputs_mapping: dict[str, str]
    :ivar connections: This is a dictionary.
    :vartype connections: dict[str, dict[str, str]]
    :ivar environment_variables: This is a dictionary.
    :vartype environment_variables: dict[str, str]
    :ivar aml_compute_name:
    :vartype aml_compute_name: str
    :ivar runtime_name:
    :vartype runtime_name: str
    :ivar session_id:
    :vartype session_id: str
    :ivar vm_size:
    :vartype vm_size: str
    :ivar max_idle_time_seconds:
    :vartype max_idle_time_seconds: long
    :ivar session_setup_mode: Possible values include: "ClientWait", "SystemWait".
    :vartype session_setup_mode: str or ~flow.models.SessionSetupModeEnum
    :ivar output_data_store:
    :vartype output_data_store: str
    :ivar flow_lineage_id:
    :vartype flow_lineage_id: str
    :ivar run_display_name_generation_type: Possible values include: "AutoAppend",
     "UserProvidedMacro".
    :vartype run_display_name_generation_type: str or
     ~flow.models.RunDisplayNameGenerationType
    """

    # Auto-generated msrest map: Python attribute -> REST wire key and msrest type token.
    # '{{str}}' serializes 'connections' as a nested string map (dict[str, dict[str, str]]).
    _attribute_map = {
        'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
        'flow_definition_resource_id': {'key': 'flowDefinitionResourceId', 'type': 'str'},
        'flow_definition_data_store_name': {'key': 'flowDefinitionDataStoreName', 'type': 'str'},
        'flow_definition_blob_path': {'key': 'flowDefinitionBlobPath', 'type': 'str'},
        'flow_definition_data_uri': {'key': 'flowDefinitionDataUri', 'type': 'str'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'run_display_name': {'key': 'runDisplayName', 'type': 'str'},
        'run_experiment_name': {'key': 'runExperimentName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'node_variant': {'key': 'nodeVariant', 'type': 'str'},
        'variant_run_id': {'key': 'variantRunId', 'type': 'str'},
        'baseline_run_id': {'key': 'baselineRunId', 'type': 'str'},
        'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
        'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
        'connections': {'key': 'connections', 'type': '{{str}}'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
        'runtime_name': {'key': 'runtimeName', 'type': 'str'},
        'session_id': {'key': 'sessionId', 'type': 'str'},
        'vm_size': {'key': 'vmSize', 'type': 'str'},
        'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
        'session_setup_mode': {'key': 'sessionSetupMode', 'type': 'str'},
        'output_data_store': {'key': 'outputDataStore', 'type': 'str'},
        'flow_lineage_id': {'key': 'flowLineageId', 'type': 'str'},
        'run_display_name_generation_type': {'key': 'runDisplayNameGenerationType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        flow_definition_file_path: Optional[str] = None,
        flow_definition_resource_id: Optional[str] = None,
        flow_definition_data_store_name: Optional[str] = None,
        flow_definition_blob_path: Optional[str] = None,
        flow_definition_data_uri: Optional[str] = None,
        run_id: Optional[str] = None,
        run_display_name: Optional[str] = None,
        run_experiment_name: Optional[str] = None,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        properties: Optional[Dict[str, str]] = None,
        node_variant: Optional[str] = None,
        variant_run_id: Optional[str] = None,
        baseline_run_id: Optional[str] = None,
        batch_data_input: Optional["BatchDataInput"] = None,
        inputs_mapping: Optional[Dict[str, str]] = None,
        connections: Optional[Dict[str, Dict[str, str]]] = None,
        environment_variables: Optional[Dict[str, str]] = None,
        aml_compute_name: Optional[str] = None,
        runtime_name: Optional[str] = None,
        session_id: Optional[str] = None,
        vm_size: Optional[str] = None,
        max_idle_time_seconds: Optional[int] = None,
        session_setup_mode: Optional[Union[str, "SessionSetupModeEnum"]] = None,
        output_data_store: Optional[str] = None,
        flow_lineage_id: Optional[str] = None,
        run_display_name_generation_type: Optional[Union[str, "RunDisplayNameGenerationType"]] = None,
        **kwargs
    ):
        """
        :keyword flow_definition_file_path:
        :paramtype flow_definition_file_path: str
        :keyword flow_definition_resource_id:
        :paramtype flow_definition_resource_id: str
        :keyword flow_definition_data_store_name:
        :paramtype flow_definition_data_store_name: str
        :keyword flow_definition_blob_path:
        :paramtype flow_definition_blob_path: str
        :keyword flow_definition_data_uri:
        :paramtype flow_definition_data_uri: str
        :keyword run_id:
        :paramtype run_id: str
        :keyword run_display_name:
        :paramtype run_display_name: str
        :keyword run_experiment_name:
        :paramtype run_experiment_name: str
        :keyword description:
        :paramtype description: str
        :keyword tags: A set of tags. This is a dictionary.
        :paramtype tags: dict[str, str]
        :keyword properties: This is a dictionary.
        :paramtype properties: dict[str, str]
        :keyword node_variant:
        :paramtype node_variant: str
        :keyword variant_run_id:
        :paramtype variant_run_id: str
        :keyword baseline_run_id:
        :paramtype baseline_run_id: str
        :keyword batch_data_input:
        :paramtype batch_data_input: ~flow.models.BatchDataInput
        :keyword inputs_mapping: This is a dictionary.
        :paramtype inputs_mapping: dict[str, str]
        :keyword connections: This is a dictionary.
        :paramtype connections: dict[str, dict[str, str]]
        :keyword environment_variables: This is a dictionary.
        :paramtype environment_variables: dict[str, str]
        :keyword aml_compute_name:
        :paramtype aml_compute_name: str
        :keyword runtime_name:
        :paramtype runtime_name: str
        :keyword session_id:
        :paramtype session_id: str
        :keyword vm_size:
        :paramtype vm_size: str
        :keyword max_idle_time_seconds:
        :paramtype max_idle_time_seconds: long
        :keyword session_setup_mode: Possible values include: "ClientWait", "SystemWait".
        :paramtype session_setup_mode: str or ~flow.models.SessionSetupModeEnum
        :keyword output_data_store:
        :paramtype output_data_store: str
        :keyword flow_lineage_id:
        :paramtype flow_lineage_id: str
        :keyword run_display_name_generation_type: Possible values include: "AutoAppend",
         "UserProvidedMacro".
        :paramtype run_display_name_generation_type: str or
         ~flow.models.RunDisplayNameGenerationType
        """
        super(SubmitBulkRunRequest, self).__init__(**kwargs)
        self.flow_definition_file_path = flow_definition_file_path
        self.flow_definition_resource_id = flow_definition_resource_id
        self.flow_definition_data_store_name = flow_definition_data_store_name
        self.flow_definition_blob_path = flow_definition_blob_path
        self.flow_definition_data_uri = flow_definition_data_uri
        self.run_id = run_id
        self.run_display_name = run_display_name
        self.run_experiment_name = run_experiment_name
        self.description = description
        self.tags = tags
        self.properties = properties
        self.node_variant = node_variant
        self.variant_run_id = variant_run_id
        self.baseline_run_id = baseline_run_id
        self.batch_data_input = batch_data_input
        self.inputs_mapping = inputs_mapping
        self.connections = connections
        self.environment_variables = environment_variables
        self.aml_compute_name = aml_compute_name
        self.runtime_name = runtime_name
        self.session_id = session_id
        self.vm_size = vm_size
        self.max_idle_time_seconds = max_idle_time_seconds
        self.session_setup_mode = session_setup_mode
        self.output_data_store = output_data_store
        self.flow_lineage_id = flow_lineage_id
        self.run_display_name_generation_type = run_display_name_generation_type


class SubmitBulkRunResponse(msrest.serialization.Model):
    """SubmitBulkRunResponse.

    :ivar next_action_interval_in_seconds:
    :vartype next_action_interval_in_seconds: int
    :ivar action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
     "SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
    :vartype action_type: str or ~flow.models.ActionType
    :ivar flow_runs:
    :vartype flow_runs: list[any]
    :ivar node_runs:
    :vartype node_runs: list[any]
    :ivar error_response: The error response.
:vartype error_response: ~flow.models.ErrorResponse :ivar flow_name: :vartype flow_name: str :ivar flow_run_display_name: :vartype flow_run_display_name: str :ivar flow_run_id: :vartype flow_run_id: str :ivar flow_graph: :vartype flow_graph: ~flow.models.FlowGraph :ivar flow_graph_layout: :vartype flow_graph_layout: ~flow.models.FlowGraphLayout :ivar flow_run_resource_id: :vartype flow_run_resource_id: str :ivar bulk_test_id: :vartype bulk_test_id: str :ivar batch_inputs: :vartype batch_inputs: list[dict[str, any]] :ivar batch_data_input: :vartype batch_data_input: ~flow.models.BatchDataInput :ivar created_by: :vartype created_by: ~flow.models.SchemaContractsCreatedBy :ivar created_on: :vartype created_on: ~datetime.datetime :ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun", "PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun". :vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :vartype flow_type: str or ~flow.models.FlowType :ivar runtime_name: :vartype runtime_name: str :ivar aml_compute_name: :vartype aml_compute_name: str :ivar flow_run_logs: Dictionary of :code:`<string>`. :vartype flow_run_logs: dict[str, str] :ivar flow_test_mode: Possible values include: "Sync", "Async". :vartype flow_test_mode: str or ~flow.models.FlowTestMode :ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`. :vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo] :ivar working_directory: :vartype working_directory: str :ivar flow_dag_file_relative_path: :vartype flow_dag_file_relative_path: str :ivar flow_snapshot_id: :vartype flow_snapshot_id: str :ivar variant_run_to_evaluation_runs_id_mapping: Dictionary of <componentsΒ·1mlssi7Β·schemasΒ·submitbulkrunresponseΒ·propertiesΒ·variantruntoevaluationrunsidmappingΒ·additionalproperties>. 
:vartype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]] """ _attribute_map = { 'next_action_interval_in_seconds': {'key': 'nextActionIntervalInSeconds', 'type': 'int'}, 'action_type': {'key': 'actionType', 'type': 'str'}, 'flow_runs': {'key': 'flow_runs', 'type': '[object]'}, 'node_runs': {'key': 'node_runs', 'type': '[object]'}, 'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'}, 'flow_name': {'key': 'flowName', 'type': 'str'}, 'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'}, 'flow_run_id': {'key': 'flowRunId', 'type': 'str'}, 'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'}, 'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'}, 'flow_run_resource_id': {'key': 'flowRunResourceId', 'type': 'str'}, 'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'}, 'batch_inputs': {'key': 'batchInputs', 'type': '[{object}]'}, 'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'}, 'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'}, 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'flow_run_type': {'key': 'flowRunType', 'type': 'str'}, 'flow_type': {'key': 'flowType', 'type': 'str'}, 'runtime_name': {'key': 'runtimeName', 'type': 'str'}, 'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'}, 'flow_run_logs': {'key': 'flowRunLogs', 'type': '{str}'}, 'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'}, 'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'}, 'working_directory': {'key': 'workingDirectory', 'type': 'str'}, 'flow_dag_file_relative_path': {'key': 'flowDagFileRelativePath', 'type': 'str'}, 'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'}, 'variant_run_to_evaluation_runs_id_mapping': {'key': 'variantRunToEvaluationRunsIdMapping', 'type': '{[str]}'}, } def __init__( self, *, next_action_interval_in_seconds: Optional[int] = None, action_type: Optional[Union[str, "ActionType"]] = None, flow_runs: 
Optional[List[Any]] = None, node_runs: Optional[List[Any]] = None, error_response: Optional["ErrorResponse"] = None, flow_name: Optional[str] = None, flow_run_display_name: Optional[str] = None, flow_run_id: Optional[str] = None, flow_graph: Optional["FlowGraph"] = None, flow_graph_layout: Optional["FlowGraphLayout"] = None, flow_run_resource_id: Optional[str] = None, bulk_test_id: Optional[str] = None, batch_inputs: Optional[List[Dict[str, Any]]] = None, batch_data_input: Optional["BatchDataInput"] = None, created_by: Optional["SchemaContractsCreatedBy"] = None, created_on: Optional[datetime.datetime] = None, flow_run_type: Optional[Union[str, "FlowRunTypeEnum"]] = None, flow_type: Optional[Union[str, "FlowType"]] = None, runtime_name: Optional[str] = None, aml_compute_name: Optional[str] = None, flow_run_logs: Optional[Dict[str, str]] = None, flow_test_mode: Optional[Union[str, "FlowTestMode"]] = None, flow_test_infos: Optional[Dict[str, "FlowTestInfo"]] = None, working_directory: Optional[str] = None, flow_dag_file_relative_path: Optional[str] = None, flow_snapshot_id: Optional[str] = None, variant_run_to_evaluation_runs_id_mapping: Optional[Dict[str, List[str]]] = None, **kwargs ): """ :keyword next_action_interval_in_seconds: :paramtype next_action_interval_in_seconds: int :keyword action_type: Possible values include: "SendValidationRequest", "GetValidationStatus", "SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent". :paramtype action_type: str or ~flow.models.ActionType :keyword flow_runs: :paramtype flow_runs: list[any] :keyword node_runs: :paramtype node_runs: list[any] :keyword error_response: The error response. 
:paramtype error_response: ~flow.models.ErrorResponse :keyword flow_name: :paramtype flow_name: str :keyword flow_run_display_name: :paramtype flow_run_display_name: str :keyword flow_run_id: :paramtype flow_run_id: str :keyword flow_graph: :paramtype flow_graph: ~flow.models.FlowGraph :keyword flow_graph_layout: :paramtype flow_graph_layout: ~flow.models.FlowGraphLayout :keyword flow_run_resource_id: :paramtype flow_run_resource_id: str :keyword bulk_test_id: :paramtype bulk_test_id: str :keyword batch_inputs: :paramtype batch_inputs: list[dict[str, any]] :keyword batch_data_input: :paramtype batch_data_input: ~flow.models.BatchDataInput :keyword created_by: :paramtype created_by: ~flow.models.SchemaContractsCreatedBy :keyword created_on: :paramtype created_on: ~datetime.datetime :keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun", "PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun". :paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :paramtype flow_type: str or ~flow.models.FlowType :keyword runtime_name: :paramtype runtime_name: str :keyword aml_compute_name: :paramtype aml_compute_name: str :keyword flow_run_logs: Dictionary of :code:`<string>`. :paramtype flow_run_logs: dict[str, str] :keyword flow_test_mode: Possible values include: "Sync", "Async". :paramtype flow_test_mode: str or ~flow.models.FlowTestMode :keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`. :paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo] :keyword working_directory: :paramtype working_directory: str :keyword flow_dag_file_relative_path: :paramtype flow_dag_file_relative_path: str :keyword flow_snapshot_id: :paramtype flow_snapshot_id: str :keyword variant_run_to_evaluation_runs_id_mapping: Dictionary of <componentsΒ·1mlssi7Β·schemasΒ·submitbulkrunresponseΒ·propertiesΒ·variantruntoevaluationrunsidmappingΒ·additionalproperties>. 
:paramtype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]] """ super(SubmitBulkRunResponse, self).__init__(**kwargs) self.next_action_interval_in_seconds = next_action_interval_in_seconds self.action_type = action_type self.flow_runs = flow_runs self.node_runs = node_runs self.error_response = error_response self.flow_name = flow_name self.flow_run_display_name = flow_run_display_name self.flow_run_id = flow_run_id self.flow_graph = flow_graph self.flow_graph_layout = flow_graph_layout self.flow_run_resource_id = flow_run_resource_id self.bulk_test_id = bulk_test_id self.batch_inputs = batch_inputs self.batch_data_input = batch_data_input self.created_by = created_by self.created_on = created_on self.flow_run_type = flow_run_type self.flow_type = flow_type self.runtime_name = runtime_name self.aml_compute_name = aml_compute_name self.flow_run_logs = flow_run_logs self.flow_test_mode = flow_test_mode self.flow_test_infos = flow_test_infos self.working_directory = working_directory self.flow_dag_file_relative_path = flow_dag_file_relative_path self.flow_snapshot_id = flow_snapshot_id self.variant_run_to_evaluation_runs_id_mapping = variant_run_to_evaluation_runs_id_mapping class SubmitFlowRequest(msrest.serialization.Model): """SubmitFlowRequest. 
:ivar flow_run_id: :vartype flow_run_id: str :ivar flow_run_display_name: :vartype flow_run_display_name: str :ivar flow_id: :vartype flow_id: str :ivar flow: :vartype flow: ~flow.models.Flow :ivar flow_submit_run_settings: :vartype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings :ivar async_submission: :vartype async_submission: bool :ivar use_workspace_connection: :vartype use_workspace_connection: bool :ivar use_flow_snapshot_to_submit: :vartype use_flow_snapshot_to_submit: bool :ivar enable_blob_run_artifacts: :vartype enable_blob_run_artifacts: bool :ivar enable_async_flow_test: :vartype enable_async_flow_test: bool :ivar flow_runtime_submission_api_version: Possible values include: "Version1", "Version2". :vartype flow_runtime_submission_api_version: str or ~flow.models.FlowRuntimeSubmissionApiVersion :ivar run_display_name_generation_type: Possible values include: "AutoAppend", "UserProvidedMacro". :vartype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType """ _attribute_map = { 'flow_run_id': {'key': 'flowRunId', 'type': 'str'}, 'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'}, 'flow_id': {'key': 'flowId', 'type': 'str'}, 'flow': {'key': 'flow', 'type': 'Flow'}, 'flow_submit_run_settings': {'key': 'flowSubmitRunSettings', 'type': 'FlowSubmitRunSettings'}, 'async_submission': {'key': 'asyncSubmission', 'type': 'bool'}, 'use_workspace_connection': {'key': 'useWorkspaceConnection', 'type': 'bool'}, 'use_flow_snapshot_to_submit': {'key': 'useFlowSnapshotToSubmit', 'type': 'bool'}, 'enable_blob_run_artifacts': {'key': 'enableBlobRunArtifacts', 'type': 'bool'}, 'enable_async_flow_test': {'key': 'enableAsyncFlowTest', 'type': 'bool'}, 'flow_runtime_submission_api_version': {'key': 'flowRuntimeSubmissionApiVersion', 'type': 'str'}, 'run_display_name_generation_type': {'key': 'runDisplayNameGenerationType', 'type': 'str'}, } def __init__( self, *, flow_run_id: Optional[str] = None, 
flow_run_display_name: Optional[str] = None, flow_id: Optional[str] = None, flow: Optional["Flow"] = None, flow_submit_run_settings: Optional["FlowSubmitRunSettings"] = None, async_submission: Optional[bool] = None, use_workspace_connection: Optional[bool] = None, use_flow_snapshot_to_submit: Optional[bool] = None, enable_blob_run_artifacts: Optional[bool] = None, enable_async_flow_test: Optional[bool] = None, flow_runtime_submission_api_version: Optional[Union[str, "FlowRuntimeSubmissionApiVersion"]] = None, run_display_name_generation_type: Optional[Union[str, "RunDisplayNameGenerationType"]] = None, **kwargs ): """ :keyword flow_run_id: :paramtype flow_run_id: str :keyword flow_run_display_name: :paramtype flow_run_display_name: str :keyword flow_id: :paramtype flow_id: str :keyword flow: :paramtype flow: ~flow.models.Flow :keyword flow_submit_run_settings: :paramtype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings :keyword async_submission: :paramtype async_submission: bool :keyword use_workspace_connection: :paramtype use_workspace_connection: bool :keyword use_flow_snapshot_to_submit: :paramtype use_flow_snapshot_to_submit: bool :keyword enable_blob_run_artifacts: :paramtype enable_blob_run_artifacts: bool :keyword enable_async_flow_test: :paramtype enable_async_flow_test: bool :keyword flow_runtime_submission_api_version: Possible values include: "Version1", "Version2". :paramtype flow_runtime_submission_api_version: str or ~flow.models.FlowRuntimeSubmissionApiVersion :keyword run_display_name_generation_type: Possible values include: "AutoAppend", "UserProvidedMacro". 
:paramtype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType """ super(SubmitFlowRequest, self).__init__(**kwargs) self.flow_run_id = flow_run_id self.flow_run_display_name = flow_run_display_name self.flow_id = flow_id self.flow = flow self.flow_submit_run_settings = flow_submit_run_settings self.async_submission = async_submission self.use_workspace_connection = use_workspace_connection self.use_flow_snapshot_to_submit = use_flow_snapshot_to_submit self.enable_blob_run_artifacts = enable_blob_run_artifacts self.enable_async_flow_test = enable_async_flow_test self.flow_runtime_submission_api_version = flow_runtime_submission_api_version self.run_display_name_generation_type = run_display_name_generation_type class SubmitPipelineRunRequest(msrest.serialization.Model): """SubmitPipelineRunRequest. :ivar compute_target: :vartype compute_target: str :ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`. :vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft] :ivar step_tags: This is a dictionary. :vartype step_tags: dict[str, str] :ivar experiment_name: :vartype experiment_name: str :ivar pipeline_parameters: This is a dictionary. :vartype pipeline_parameters: dict[str, str] :ivar data_path_assignments: This is a dictionary. :vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath] :ivar data_set_definition_value_assignments: This is a dictionary. :vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue] :ivar asset_output_settings_assignments: This is a dictionary. 
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings] :ivar enable_notification: :vartype enable_notification: bool :ivar sub_pipelines_info: :vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo :ivar display_name: :vartype display_name: str :ivar run_id: :vartype run_id: str :ivar parent_run_id: :vartype parent_run_id: str :ivar graph: :vartype graph: ~flow.models.GraphDraftEntity :ivar pipeline_run_settings: :vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment] :ivar module_node_run_settings: :vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting] :ivar module_node_ui_input_settings: :vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting] :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, str] :ivar continue_run_on_step_failure: :vartype continue_run_on_step_failure: bool :ivar description: :vartype description: str :ivar properties: This is a dictionary. :vartype properties: dict[str, str] :ivar enforce_rerun: :vartype enforce_rerun: bool :ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset", "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset". 
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes """ _attribute_map = { 'compute_target': {'key': 'computeTarget', 'type': 'str'}, 'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'}, 'step_tags': {'key': 'stepTags', 'type': '{str}'}, 'experiment_name': {'key': 'experimentName', 'type': 'str'}, 'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'}, 'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'}, 'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'}, 'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'}, 'enable_notification': {'key': 'enableNotification', 'type': 'bool'}, 'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'run_id': {'key': 'runId', 'type': 'str'}, 'parent_run_id': {'key': 'parentRunId', 'type': 'str'}, 'graph': {'key': 'graph', 'type': 'GraphDraftEntity'}, 'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'}, 'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'}, 'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'}, 'description': {'key': 'description', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'}, 'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'}, } def __init__( self, *, compute_target: Optional[str] = None, flattened_sub_graphs: Optional[Dict[str, "PipelineSubDraft"]] = None, step_tags: Optional[Dict[str, str]] = None, experiment_name: Optional[str] = None, 
pipeline_parameters: Optional[Dict[str, str]] = None, data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None, data_set_definition_value_assignments: Optional[Dict[str, "DataSetDefinitionValue"]] = None, asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None, enable_notification: Optional[bool] = None, sub_pipelines_info: Optional["SubPipelinesInfo"] = None, display_name: Optional[str] = None, run_id: Optional[str] = None, parent_run_id: Optional[str] = None, graph: Optional["GraphDraftEntity"] = None, pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None, module_node_run_settings: Optional[List["GraphModuleNodeRunSetting"]] = None, module_node_ui_input_settings: Optional[List["GraphModuleNodeUIInputSetting"]] = None, tags: Optional[Dict[str, str]] = None, continue_run_on_step_failure: Optional[bool] = None, description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, enforce_rerun: Optional[bool] = None, dataset_access_modes: Optional[Union[str, "DatasetAccessModes"]] = None, **kwargs ): """ :keyword compute_target: :paramtype compute_target: str :keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`. :paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft] :keyword step_tags: This is a dictionary. :paramtype step_tags: dict[str, str] :keyword experiment_name: :paramtype experiment_name: str :keyword pipeline_parameters: This is a dictionary. :paramtype pipeline_parameters: dict[str, str] :keyword data_path_assignments: This is a dictionary. :paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath] :keyword data_set_definition_value_assignments: This is a dictionary. :paramtype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue] :keyword asset_output_settings_assignments: This is a dictionary. 
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings] :keyword enable_notification: :paramtype enable_notification: bool :keyword sub_pipelines_info: :paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo :keyword display_name: :paramtype display_name: str :keyword run_id: :paramtype run_id: str :keyword parent_run_id: :paramtype parent_run_id: str :keyword graph: :paramtype graph: ~flow.models.GraphDraftEntity :keyword pipeline_run_settings: :paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment] :keyword module_node_run_settings: :paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting] :keyword module_node_ui_input_settings: :paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting] :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, str] :keyword continue_run_on_step_failure: :paramtype continue_run_on_step_failure: bool :keyword description: :paramtype description: str :keyword properties: This is a dictionary. :paramtype properties: dict[str, str] :keyword enforce_rerun: :paramtype enforce_rerun: bool :keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset", "AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset". 
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes """ super(SubmitPipelineRunRequest, self).__init__(**kwargs) self.compute_target = compute_target self.flattened_sub_graphs = flattened_sub_graphs self.step_tags = step_tags self.experiment_name = experiment_name self.pipeline_parameters = pipeline_parameters self.data_path_assignments = data_path_assignments self.data_set_definition_value_assignments = data_set_definition_value_assignments self.asset_output_settings_assignments = asset_output_settings_assignments self.enable_notification = enable_notification self.sub_pipelines_info = sub_pipelines_info self.display_name = display_name self.run_id = run_id self.parent_run_id = parent_run_id self.graph = graph self.pipeline_run_settings = pipeline_run_settings self.module_node_run_settings = module_node_run_settings self.module_node_ui_input_settings = module_node_ui_input_settings self.tags = tags self.continue_run_on_step_failure = continue_run_on_step_failure self.description = description self.properties = properties self.enforce_rerun = enforce_rerun self.dataset_access_modes = dataset_access_modes class SubPipelineDefinition(msrest.serialization.Model): """SubPipelineDefinition. 
:ivar name: :vartype name: str :ivar description: :vartype description: str :ivar default_compute_target: :vartype default_compute_target: ~flow.models.ComputeSetting :ivar default_data_store: :vartype default_data_store: ~flow.models.DatastoreSetting :ivar pipeline_function_name: :vartype pipeline_function_name: str :ivar id: :vartype id: str :ivar parent_definition_id: :vartype parent_definition_id: str :ivar from_module_name: :vartype from_module_name: str :ivar parameter_list: :vartype parameter_list: list[~flow.models.Kwarg] """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'default_compute_target': {'key': 'defaultComputeTarget', 'type': 'ComputeSetting'}, 'default_data_store': {'key': 'defaultDataStore', 'type': 'DatastoreSetting'}, 'pipeline_function_name': {'key': 'pipelineFunctionName', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'parent_definition_id': {'key': 'parentDefinitionId', 'type': 'str'}, 'from_module_name': {'key': 'fromModuleName', 'type': 'str'}, 'parameter_list': {'key': 'parameterList', 'type': '[Kwarg]'}, } def __init__( self, *, name: Optional[str] = None, description: Optional[str] = None, default_compute_target: Optional["ComputeSetting"] = None, default_data_store: Optional["DatastoreSetting"] = None, pipeline_function_name: Optional[str] = None, id: Optional[str] = None, parent_definition_id: Optional[str] = None, from_module_name: Optional[str] = None, parameter_list: Optional[List["Kwarg"]] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword description: :paramtype description: str :keyword default_compute_target: :paramtype default_compute_target: ~flow.models.ComputeSetting :keyword default_data_store: :paramtype default_data_store: ~flow.models.DatastoreSetting :keyword pipeline_function_name: :paramtype pipeline_function_name: str :keyword id: :paramtype id: str :keyword parent_definition_id: :paramtype parent_definition_id: str :keyword 
from_module_name: :paramtype from_module_name: str :keyword parameter_list: :paramtype parameter_list: list[~flow.models.Kwarg] """ super(SubPipelineDefinition, self).__init__(**kwargs) self.name = name self.description = description self.default_compute_target = default_compute_target self.default_data_store = default_data_store self.pipeline_function_name = pipeline_function_name self.id = id self.parent_definition_id = parent_definition_id self.from_module_name = from_module_name self.parameter_list = parameter_list class SubPipelineParameterAssignment(msrest.serialization.Model): """SubPipelineParameterAssignment. :ivar node_id: :vartype node_id: str :ivar parameter_name: :vartype parameter_name: str """ _attribute_map = { 'node_id': {'key': 'nodeId', 'type': 'str'}, 'parameter_name': {'key': 'parameterName', 'type': 'str'}, } def __init__( self, *, node_id: Optional[str] = None, parameter_name: Optional[str] = None, **kwargs ): """ :keyword node_id: :paramtype node_id: str :keyword parameter_name: :paramtype parameter_name: str """ super(SubPipelineParameterAssignment, self).__init__(**kwargs) self.node_id = node_id self.parameter_name = parameter_name class SubPipelinesInfo(msrest.serialization.Model): """SubPipelinesInfo. :ivar sub_graph_info: :vartype sub_graph_info: list[~flow.models.SubGraphInfo] :ivar node_id_to_sub_graph_id_mapping: Dictionary of :code:`<string>`. 
    :vartype node_id_to_sub_graph_id_mapping: dict[str, str]
    :ivar sub_pipeline_definition:
    :vartype sub_pipeline_definition: list[~flow.models.SubPipelineDefinition]
    """

    _attribute_map = {
        'sub_graph_info': {'key': 'subGraphInfo', 'type': '[SubGraphInfo]'},
        'node_id_to_sub_graph_id_mapping': {'key': 'nodeIdToSubGraphIdMapping', 'type': '{str}'},
        'sub_pipeline_definition': {'key': 'subPipelineDefinition', 'type': '[SubPipelineDefinition]'},
    }

    def __init__(
        self,
        *,
        sub_graph_info: Optional[List["SubGraphInfo"]] = None,
        node_id_to_sub_graph_id_mapping: Optional[Dict[str, str]] = None,
        sub_pipeline_definition: Optional[List["SubPipelineDefinition"]] = None,
        **kwargs
    ):
        """
        :keyword sub_graph_info:
        :paramtype sub_graph_info: list[~flow.models.SubGraphInfo]
        :keyword node_id_to_sub_graph_id_mapping: Dictionary of :code:`<string>`.
        :paramtype node_id_to_sub_graph_id_mapping: dict[str, str]
        :keyword sub_pipeline_definition:
        :paramtype sub_pipeline_definition: list[~flow.models.SubPipelineDefinition]
        """
        super(SubPipelinesInfo, self).__init__(**kwargs)
        self.sub_graph_info = sub_graph_info
        self.node_id_to_sub_graph_id_mapping = node_id_to_sub_graph_id_mapping
        self.sub_pipeline_definition = sub_pipeline_definition


# NOTE: AutoRest-generated model (see file header); hand edits are lost on
# regeneration. The type is recursive: each period may contain nested
# sub_periods of the same shape.
class SubStatusPeriod(msrest.serialization.Model):
    """SubStatusPeriod.

    :ivar name:
    :vartype name: str
    :ivar sub_periods:
    :vartype sub_periods: list[~flow.models.SubStatusPeriod]
    :ivar start:
    :vartype start: long
    :ivar end:
    :vartype end: long
    """

    # msrest (de)serialization map; 'long' is msrest's 64-bit-integer wire
    # type, represented in Python as int. 'start'/'end' look like a time range
    # in some epoch unit -- TODO confirm units against the service docs.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'sub_periods': {'key': 'subPeriods', 'type': '[SubStatusPeriod]'},
        'start': {'key': 'start', 'type': 'long'},
        'end': {'key': 'end', 'type': 'long'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        sub_periods: Optional[List["SubStatusPeriod"]] = None,
        start: Optional[int] = None,
        end: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword name:
        :paramtype name: str
        :keyword sub_periods:
        :paramtype sub_periods: list[~flow.models.SubStatusPeriod]
        :keyword start:
        :paramtype start: long
        :keyword end:
        :paramtype end: long
        """
        super(SubStatusPeriod, self).__init__(**kwargs)
        self.name = name
        self.sub_periods = sub_periods
        self.start = start
        self.end = end


class SweepEarlyTerminationPolicy(msrest.serialization.Model):
    """SweepEarlyTerminationPolicy.

    :ivar policy_type: Possible values include: "Bandit", "MedianStopping", "TruncationSelection".
:vartype policy_type: str or ~flow.models.EarlyTerminationPolicyType :ivar evaluation_interval: :vartype evaluation_interval: int :ivar delay_evaluation: :vartype delay_evaluation: int :ivar slack_factor: :vartype slack_factor: float :ivar slack_amount: :vartype slack_amount: float :ivar truncation_percentage: :vartype truncation_percentage: int """ _attribute_map = { 'policy_type': {'key': 'policyType', 'type': 'str'}, 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, 'slack_factor': {'key': 'slackFactor', 'type': 'float'}, 'slack_amount': {'key': 'slackAmount', 'type': 'float'}, 'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'}, } def __init__( self, *, policy_type: Optional[Union[str, "EarlyTerminationPolicyType"]] = None, evaluation_interval: Optional[int] = None, delay_evaluation: Optional[int] = None, slack_factor: Optional[float] = None, slack_amount: Optional[float] = None, truncation_percentage: Optional[int] = None, **kwargs ): """ :keyword policy_type: Possible values include: "Bandit", "MedianStopping", "TruncationSelection". :paramtype policy_type: str or ~flow.models.EarlyTerminationPolicyType :keyword evaluation_interval: :paramtype evaluation_interval: int :keyword delay_evaluation: :paramtype delay_evaluation: int :keyword slack_factor: :paramtype slack_factor: float :keyword slack_amount: :paramtype slack_amount: float :keyword truncation_percentage: :paramtype truncation_percentage: int """ super(SweepEarlyTerminationPolicy, self).__init__(**kwargs) self.policy_type = policy_type self.evaluation_interval = evaluation_interval self.delay_evaluation = delay_evaluation self.slack_factor = slack_factor self.slack_amount = slack_amount self.truncation_percentage = truncation_percentage class SweepSettings(msrest.serialization.Model): """SweepSettings. 
:ivar limits: :vartype limits: ~flow.models.SweepSettingsLimits :ivar search_space: :vartype search_space: list[dict[str, str]] :ivar sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian". :vartype sampling_algorithm: str or ~flow.models.SamplingAlgorithmType :ivar early_termination: :vartype early_termination: ~flow.models.SweepEarlyTerminationPolicy """ _attribute_map = { 'limits': {'key': 'limits', 'type': 'SweepSettingsLimits'}, 'search_space': {'key': 'searchSpace', 'type': '[{str}]'}, 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'}, 'early_termination': {'key': 'earlyTermination', 'type': 'SweepEarlyTerminationPolicy'}, } def __init__( self, *, limits: Optional["SweepSettingsLimits"] = None, search_space: Optional[List[Dict[str, str]]] = None, sampling_algorithm: Optional[Union[str, "SamplingAlgorithmType"]] = None, early_termination: Optional["SweepEarlyTerminationPolicy"] = None, **kwargs ): """ :keyword limits: :paramtype limits: ~flow.models.SweepSettingsLimits :keyword search_space: :paramtype search_space: list[dict[str, str]] :keyword sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian". :paramtype sampling_algorithm: str or ~flow.models.SamplingAlgorithmType :keyword early_termination: :paramtype early_termination: ~flow.models.SweepEarlyTerminationPolicy """ super(SweepSettings, self).__init__(**kwargs) self.limits = limits self.search_space = search_space self.sampling_algorithm = sampling_algorithm self.early_termination = early_termination class SweepSettingsLimits(msrest.serialization.Model): """SweepSettingsLimits. 
:ivar max_total_trials: :vartype max_total_trials: int :ivar max_concurrent_trials: :vartype max_concurrent_trials: int """ _attribute_map = { 'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'}, 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, } def __init__( self, *, max_total_trials: Optional[int] = None, max_concurrent_trials: Optional[int] = None, **kwargs ): """ :keyword max_total_trials: :paramtype max_total_trials: int :keyword max_concurrent_trials: :paramtype max_concurrent_trials: int """ super(SweepSettingsLimits, self).__init__(**kwargs) self.max_total_trials = max_total_trials self.max_concurrent_trials = max_concurrent_trials class SystemData(msrest.serialization.Model): """SystemData. :ivar created_at: :vartype created_at: ~datetime.datetime :ivar created_by: :vartype created_by: str :ivar created_by_type: Possible values include: "User", "Application", "ManagedIdentity", "Key". :vartype created_by_type: str or ~flow.models.UserType :ivar last_modified_at: :vartype last_modified_at: ~datetime.datetime :ivar last_modified_by: :vartype last_modified_by: str :ivar last_modified_by_type: Possible values include: "User", "Application", "ManagedIdentity", "Key". 
:vartype last_modified_by_type: str or ~flow.models.UserType """ _attribute_map = { 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, 'created_by': {'key': 'createdBy', 'type': 'str'}, 'created_by_type': {'key': 'createdByType', 'type': 'str'}, 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, } def __init__( self, *, created_at: Optional[datetime.datetime] = None, created_by: Optional[str] = None, created_by_type: Optional[Union[str, "UserType"]] = None, last_modified_at: Optional[datetime.datetime] = None, last_modified_by: Optional[str] = None, last_modified_by_type: Optional[Union[str, "UserType"]] = None, **kwargs ): """ :keyword created_at: :paramtype created_at: ~datetime.datetime :keyword created_by: :paramtype created_by: str :keyword created_by_type: Possible values include: "User", "Application", "ManagedIdentity", "Key". :paramtype created_by_type: str or ~flow.models.UserType :keyword last_modified_at: :paramtype last_modified_at: ~datetime.datetime :keyword last_modified_by: :paramtype last_modified_by: str :keyword last_modified_by_type: Possible values include: "User", "Application", "ManagedIdentity", "Key". :paramtype last_modified_by_type: str or ~flow.models.UserType """ super(SystemData, self).__init__(**kwargs) self.created_at = created_at self.created_by = created_by self.created_by_type = created_by_type self.last_modified_at = last_modified_at self.last_modified_by = last_modified_by self.last_modified_by_type = last_modified_by_type class SystemMeta(msrest.serialization.Model): """SystemMeta. 
:ivar identifier_hash: :vartype identifier_hash: str :ivar extra_hash: :vartype extra_hash: str :ivar content_hash: :vartype content_hash: str :ivar identifier_hashes: :vartype identifier_hashes: ~flow.models.SystemMetaIdentifierHashes :ivar extra_hashes: :vartype extra_hashes: ~flow.models.SystemMetaExtraHashes """ _attribute_map = { 'identifier_hash': {'key': 'identifierHash', 'type': 'str'}, 'extra_hash': {'key': 'extraHash', 'type': 'str'}, 'content_hash': {'key': 'contentHash', 'type': 'str'}, 'identifier_hashes': {'key': 'identifierHashes', 'type': 'SystemMetaIdentifierHashes'}, 'extra_hashes': {'key': 'extraHashes', 'type': 'SystemMetaExtraHashes'}, } def __init__( self, *, identifier_hash: Optional[str] = None, extra_hash: Optional[str] = None, content_hash: Optional[str] = None, identifier_hashes: Optional["SystemMetaIdentifierHashes"] = None, extra_hashes: Optional["SystemMetaExtraHashes"] = None, **kwargs ): """ :keyword identifier_hash: :paramtype identifier_hash: str :keyword extra_hash: :paramtype extra_hash: str :keyword content_hash: :paramtype content_hash: str :keyword identifier_hashes: :paramtype identifier_hashes: ~flow.models.SystemMetaIdentifierHashes :keyword extra_hashes: :paramtype extra_hashes: ~flow.models.SystemMetaExtraHashes """ super(SystemMeta, self).__init__(**kwargs) self.identifier_hash = identifier_hash self.extra_hash = extra_hash self.content_hash = content_hash self.identifier_hashes = identifier_hashes self.extra_hashes = extra_hashes class SystemMetaExtraHashes(msrest.serialization.Model): """SystemMetaExtraHashes. 
:ivar identifier_hash: :vartype identifier_hash: str :ivar identifier_hash_v2: :vartype identifier_hash_v2: str """ _attribute_map = { 'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'}, 'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'}, } def __init__( self, *, identifier_hash: Optional[str] = None, identifier_hash_v2: Optional[str] = None, **kwargs ): """ :keyword identifier_hash: :paramtype identifier_hash: str :keyword identifier_hash_v2: :paramtype identifier_hash_v2: str """ super(SystemMetaExtraHashes, self).__init__(**kwargs) self.identifier_hash = identifier_hash self.identifier_hash_v2 = identifier_hash_v2 class SystemMetaIdentifierHashes(msrest.serialization.Model): """SystemMetaIdentifierHashes. :ivar identifier_hash: :vartype identifier_hash: str :ivar identifier_hash_v2: :vartype identifier_hash_v2: str """ _attribute_map = { 'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'}, 'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'}, } def __init__( self, *, identifier_hash: Optional[str] = None, identifier_hash_v2: Optional[str] = None, **kwargs ): """ :keyword identifier_hash: :paramtype identifier_hash: str :keyword identifier_hash_v2: :paramtype identifier_hash_v2: str """ super(SystemMetaIdentifierHashes, self).__init__(**kwargs) self.identifier_hash = identifier_hash self.identifier_hash_v2 = identifier_hash_v2 class TargetLags(msrest.serialization.Model): """TargetLags. :ivar mode: Possible values include: "Auto", "Custom". :vartype mode: str or ~flow.models.TargetLagsMode :ivar values: :vartype values: list[int] """ _attribute_map = { 'mode': {'key': 'mode', 'type': 'str'}, 'values': {'key': 'values', 'type': '[int]'}, } def __init__( self, *, mode: Optional[Union[str, "TargetLagsMode"]] = None, values: Optional[List[int]] = None, **kwargs ): """ :keyword mode: Possible values include: "Auto", "Custom". 
        :paramtype mode: str or ~flow.models.TargetLagsMode
        :keyword values:
        :paramtype values: list[int]
        """
        super(TargetLags, self).__init__(**kwargs)
        self.mode = mode
        self.values = values


# Autorest-generated msrest model; manual edits are lost if the client is regenerated.
# Companion to TargetLags above: "Auto" delegates the choice to the service,
# "Custom" presumably uses the explicit value — TODO confirm against service docs.
class TargetRollingWindowSize(msrest.serialization.Model):
    """TargetRollingWindowSize.

    :ivar mode: Possible values include: "Auto", "Custom".
    :vartype mode: str or ~flow.models.TargetRollingWindowSizeMode
    :ivar value:
    :vartype value: int
    """

    _attribute_map = {
        'mode': {'key': 'mode', 'type': 'str'},
        'value': {'key': 'value', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        mode: Optional[Union[str, "TargetRollingWindowSizeMode"]] = None,
        value: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword mode: Possible values include: "Auto", "Custom".
        :paramtype mode: str or ~flow.models.TargetRollingWindowSizeMode
        :keyword value:
        :paramtype value: int
        """
        super(TargetRollingWindowSize, self).__init__(**kwargs)
        self.mode = mode
        self.value = value


# Autorest-generated msrest model; manual edits are lost if the client is regenerated.
class TargetSelectorConfiguration(msrest.serialization.Model):
    """TargetSelectorConfiguration.
:ivar low_priority_vm_tolerant: :vartype low_priority_vm_tolerant: bool :ivar cluster_block_list: :vartype cluster_block_list: list[str] :ivar compute_type: :vartype compute_type: str :ivar instance_type: :vartype instance_type: list[str] :ivar instance_types: :vartype instance_types: list[str] :ivar my_resource_only: :vartype my_resource_only: bool :ivar plan_id: :vartype plan_id: str :ivar plan_region_id: :vartype plan_region_id: str :ivar region: :vartype region: list[str] :ivar regions: :vartype regions: list[str] :ivar vc_block_list: :vartype vc_block_list: list[str] """ _attribute_map = { 'low_priority_vm_tolerant': {'key': 'lowPriorityVMTolerant', 'type': 'bool'}, 'cluster_block_list': {'key': 'clusterBlockList', 'type': '[str]'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'instance_type': {'key': 'instanceType', 'type': '[str]'}, 'instance_types': {'key': 'instanceTypes', 'type': '[str]'}, 'my_resource_only': {'key': 'myResourceOnly', 'type': 'bool'}, 'plan_id': {'key': 'planId', 'type': 'str'}, 'plan_region_id': {'key': 'planRegionId', 'type': 'str'}, 'region': {'key': 'region', 'type': '[str]'}, 'regions': {'key': 'regions', 'type': '[str]'}, 'vc_block_list': {'key': 'vcBlockList', 'type': '[str]'}, } def __init__( self, *, low_priority_vm_tolerant: Optional[bool] = None, cluster_block_list: Optional[List[str]] = None, compute_type: Optional[str] = None, instance_type: Optional[List[str]] = None, instance_types: Optional[List[str]] = None, my_resource_only: Optional[bool] = None, plan_id: Optional[str] = None, plan_region_id: Optional[str] = None, region: Optional[List[str]] = None, regions: Optional[List[str]] = None, vc_block_list: Optional[List[str]] = None, **kwargs ): """ :keyword low_priority_vm_tolerant: :paramtype low_priority_vm_tolerant: bool :keyword cluster_block_list: :paramtype cluster_block_list: list[str] :keyword compute_type: :paramtype compute_type: str :keyword instance_type: :paramtype instance_type: list[str] :keyword 
instance_types: :paramtype instance_types: list[str] :keyword my_resource_only: :paramtype my_resource_only: bool :keyword plan_id: :paramtype plan_id: str :keyword plan_region_id: :paramtype plan_region_id: str :keyword region: :paramtype region: list[str] :keyword regions: :paramtype regions: list[str] :keyword vc_block_list: :paramtype vc_block_list: list[str] """ super(TargetSelectorConfiguration, self).__init__(**kwargs) self.low_priority_vm_tolerant = low_priority_vm_tolerant self.cluster_block_list = cluster_block_list self.compute_type = compute_type self.instance_type = instance_type self.instance_types = instance_types self.my_resource_only = my_resource_only self.plan_id = plan_id self.plan_region_id = plan_region_id self.region = region self.regions = regions self.vc_block_list = vc_block_list class Task(msrest.serialization.Model): """Task. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: :vartype id: int :ivar exception: Anything. :vartype exception: any :ivar status: Possible values include: "Created", "WaitingForActivation", "WaitingToRun", "Running", "WaitingForChildrenToComplete", "RanToCompletion", "Canceled", "Faulted". :vartype status: str or ~flow.models.TaskStatus :ivar is_canceled: :vartype is_canceled: bool :ivar is_completed: :vartype is_completed: bool :ivar is_completed_successfully: :vartype is_completed_successfully: bool :ivar creation_options: Possible values include: "None", "PreferFairness", "LongRunning", "AttachedToParent", "DenyChildAttach", "HideScheduler", "RunContinuationsAsynchronously". :vartype creation_options: str or ~flow.models.TaskCreationOptions :ivar async_state: Anything. 
:vartype async_state: any :ivar is_faulted: :vartype is_faulted: bool """ _validation = { 'id': {'readonly': True}, 'exception': {'readonly': True}, 'is_canceled': {'readonly': True}, 'is_completed': {'readonly': True}, 'is_completed_successfully': {'readonly': True}, 'async_state': {'readonly': True}, 'is_faulted': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'int'}, 'exception': {'key': 'exception', 'type': 'object'}, 'status': {'key': 'status', 'type': 'str'}, 'is_canceled': {'key': 'isCanceled', 'type': 'bool'}, 'is_completed': {'key': 'isCompleted', 'type': 'bool'}, 'is_completed_successfully': {'key': 'isCompletedSuccessfully', 'type': 'bool'}, 'creation_options': {'key': 'creationOptions', 'type': 'str'}, 'async_state': {'key': 'asyncState', 'type': 'object'}, 'is_faulted': {'key': 'isFaulted', 'type': 'bool'}, } def __init__( self, *, status: Optional[Union[str, "TaskStatus"]] = None, creation_options: Optional[Union[str, "TaskCreationOptions"]] = None, **kwargs ): """ :keyword status: Possible values include: "Created", "WaitingForActivation", "WaitingToRun", "Running", "WaitingForChildrenToComplete", "RanToCompletion", "Canceled", "Faulted". :paramtype status: str or ~flow.models.TaskStatus :keyword creation_options: Possible values include: "None", "PreferFairness", "LongRunning", "AttachedToParent", "DenyChildAttach", "HideScheduler", "RunContinuationsAsynchronously". :paramtype creation_options: str or ~flow.models.TaskCreationOptions """ super(Task, self).__init__(**kwargs) self.id = None self.exception = None self.status = status self.is_canceled = None self.is_completed = None self.is_completed_successfully = None self.creation_options = creation_options self.async_state = None self.is_faulted = None class TaskControlFlowInfo(msrest.serialization.Model): """TaskControlFlowInfo. :ivar control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor". 
:vartype control_flow_type: str or ~flow.models.ControlFlowType :ivar iteration_index: :vartype iteration_index: int :ivar item_name: :vartype item_name: str :ivar parameters_overwritten: Dictionary of :code:`<string>`. :vartype parameters_overwritten: dict[str, str] :ivar is_reused: :vartype is_reused: bool """ _attribute_map = { 'control_flow_type': {'key': 'controlFlowType', 'type': 'str'}, 'iteration_index': {'key': 'iterationIndex', 'type': 'int'}, 'item_name': {'key': 'itemName', 'type': 'str'}, 'parameters_overwritten': {'key': 'parametersOverwritten', 'type': '{str}'}, 'is_reused': {'key': 'isReused', 'type': 'bool'}, } def __init__( self, *, control_flow_type: Optional[Union[str, "ControlFlowType"]] = None, iteration_index: Optional[int] = None, item_name: Optional[str] = None, parameters_overwritten: Optional[Dict[str, str]] = None, is_reused: Optional[bool] = None, **kwargs ): """ :keyword control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor". :paramtype control_flow_type: str or ~flow.models.ControlFlowType :keyword iteration_index: :paramtype iteration_index: int :keyword item_name: :paramtype item_name: str :keyword parameters_overwritten: Dictionary of :code:`<string>`. :paramtype parameters_overwritten: dict[str, str] :keyword is_reused: :paramtype is_reused: bool """ super(TaskControlFlowInfo, self).__init__(**kwargs) self.control_flow_type = control_flow_type self.iteration_index = iteration_index self.item_name = item_name self.parameters_overwritten = parameters_overwritten self.is_reused = is_reused class TaskReuseInfo(msrest.serialization.Model): """TaskReuseInfo. 
:ivar experiment_id: :vartype experiment_id: str :ivar pipeline_run_id: :vartype pipeline_run_id: str :ivar node_id: :vartype node_id: str :ivar request_id: :vartype request_id: str :ivar run_id: :vartype run_id: str :ivar node_start_time: :vartype node_start_time: ~datetime.datetime :ivar node_end_time: :vartype node_end_time: ~datetime.datetime """ _attribute_map = { 'experiment_id': {'key': 'experimentId', 'type': 'str'}, 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, 'node_id': {'key': 'nodeId', 'type': 'str'}, 'request_id': {'key': 'requestId', 'type': 'str'}, 'run_id': {'key': 'runId', 'type': 'str'}, 'node_start_time': {'key': 'nodeStartTime', 'type': 'iso-8601'}, 'node_end_time': {'key': 'nodeEndTime', 'type': 'iso-8601'}, } def __init__( self, *, experiment_id: Optional[str] = None, pipeline_run_id: Optional[str] = None, node_id: Optional[str] = None, request_id: Optional[str] = None, run_id: Optional[str] = None, node_start_time: Optional[datetime.datetime] = None, node_end_time: Optional[datetime.datetime] = None, **kwargs ): """ :keyword experiment_id: :paramtype experiment_id: str :keyword pipeline_run_id: :paramtype pipeline_run_id: str :keyword node_id: :paramtype node_id: str :keyword request_id: :paramtype request_id: str :keyword run_id: :paramtype run_id: str :keyword node_start_time: :paramtype node_start_time: ~datetime.datetime :keyword node_end_time: :paramtype node_end_time: ~datetime.datetime """ super(TaskReuseInfo, self).__init__(**kwargs) self.experiment_id = experiment_id self.pipeline_run_id = pipeline_run_id self.node_id = node_id self.request_id = request_id self.run_id = run_id self.node_start_time = node_start_time self.node_end_time = node_end_time class TensorflowConfiguration(msrest.serialization.Model): """TensorflowConfiguration. 
    :ivar worker_count:
    :vartype worker_count: int
    :ivar parameter_server_count:
    :vartype parameter_server_count: int
    """

    _attribute_map = {
        'worker_count': {'key': 'workerCount', 'type': 'int'},
        'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        worker_count: Optional[int] = None,
        parameter_server_count: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword worker_count:
        :paramtype worker_count: int
        :keyword parameter_server_count:
        :paramtype parameter_server_count: int
        """
        super(TensorflowConfiguration, self).__init__(**kwargs)
        self.worker_count = worker_count
        self.parameter_server_count = parameter_server_count


# Autorest-generated msrest model; manual edits are lost if the client is regenerated.
class TestDataSettings(msrest.serialization.Model):
    """TestDataSettings.

    :ivar test_data_size:
    :vartype test_data_size: float
    """

    # 'float' wire type; presumably a fraction of the dataset — TODO confirm range.
    _attribute_map = {
        'test_data_size': {'key': 'testDataSize', 'type': 'float'},
    }

    def __init__(
        self,
        *,
        test_data_size: Optional[float] = None,
        **kwargs
    ):
        """
        :keyword test_data_size:
        :paramtype test_data_size: float
        """
        super(TestDataSettings, self).__init__(**kwargs)
        self.test_data_size = test_data_size


# Autorest-generated msrest model; manual edits are lost if the client is regenerated.
class Tool(msrest.serialization.Model):
    """Tool.

    :ivar name:
    :vartype name: str
    :ivar type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
     "csharp".
    :vartype type: str or ~flow.models.ToolType
    :ivar inputs: This is a dictionary.
    :vartype inputs: dict[str, ~flow.models.InputDefinition]
    :ivar outputs: This is a dictionary.
    :vartype outputs: dict[str, ~flow.models.OutputDefinition]
    :ivar description:
    :vartype description: str
    :ivar connection_type:
    :vartype connection_type: list[str or ~flow.models.ConnectionType]
    :ivar module:
    :vartype module: str
    :ivar class_name:
    :vartype class_name: str
    :ivar source:
    :vartype source: str
    :ivar lkg_code:
    :vartype lkg_code: str
    :ivar code:
    :vartype code: str
    :ivar function:
    :vartype function: str
    :ivar action_type:
    :vartype action_type: str
    :ivar provider_config: This is a dictionary.
:vartype provider_config: dict[str, ~flow.models.InputDefinition] :ivar function_config: This is a dictionary. :vartype function_config: dict[str, ~flow.models.InputDefinition] :ivar icon: Anything. :vartype icon: any :ivar category: :vartype category: str :ivar tags: A set of tags. This is a dictionary. :vartype tags: dict[str, any] :ivar is_builtin: :vartype is_builtin: bool :ivar package: :vartype package: str :ivar package_version: :vartype package_version: str :ivar default_prompt: :vartype default_prompt: str :ivar enable_kwargs: :vartype enable_kwargs: bool :ivar deprecated_tools: :vartype deprecated_tools: list[str] :ivar tool_state: Possible values include: "Stable", "Preview", "Deprecated". :vartype tool_state: str or ~flow.models.ToolState """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'inputs': {'key': 'inputs', 'type': '{InputDefinition}'}, 'outputs': {'key': 'outputs', 'type': '{OutputDefinition}'}, 'description': {'key': 'description', 'type': 'str'}, 'connection_type': {'key': 'connection_type', 'type': '[str]'}, 'module': {'key': 'module', 'type': 'str'}, 'class_name': {'key': 'class_name', 'type': 'str'}, 'source': {'key': 'source', 'type': 'str'}, 'lkg_code': {'key': 'lkgCode', 'type': 'str'}, 'code': {'key': 'code', 'type': 'str'}, 'function': {'key': 'function', 'type': 'str'}, 'action_type': {'key': 'action_type', 'type': 'str'}, 'provider_config': {'key': 'provider_config', 'type': '{InputDefinition}'}, 'function_config': {'key': 'function_config', 'type': '{InputDefinition}'}, 'icon': {'key': 'icon', 'type': 'object'}, 'category': {'key': 'category', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{object}'}, 'is_builtin': {'key': 'is_builtin', 'type': 'bool'}, 'package': {'key': 'package', 'type': 'str'}, 'package_version': {'key': 'package_version', 'type': 'str'}, 'default_prompt': {'key': 'default_prompt', 'type': 'str'}, 'enable_kwargs': {'key': 'enable_kwargs', 'type': 'bool'}, 
'deprecated_tools': {'key': 'deprecated_tools', 'type': '[str]'}, 'tool_state': {'key': 'tool_state', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, type: Optional[Union[str, "ToolType"]] = None, inputs: Optional[Dict[str, "InputDefinition"]] = None, outputs: Optional[Dict[str, "OutputDefinition"]] = None, description: Optional[str] = None, connection_type: Optional[List[Union[str, "ConnectionType"]]] = None, module: Optional[str] = None, class_name: Optional[str] = None, source: Optional[str] = None, lkg_code: Optional[str] = None, code: Optional[str] = None, function: Optional[str] = None, action_type: Optional[str] = None, provider_config: Optional[Dict[str, "InputDefinition"]] = None, function_config: Optional[Dict[str, "InputDefinition"]] = None, icon: Optional[Any] = None, category: Optional[str] = None, tags: Optional[Dict[str, Any]] = None, is_builtin: Optional[bool] = None, package: Optional[str] = None, package_version: Optional[str] = None, default_prompt: Optional[str] = None, enable_kwargs: Optional[bool] = None, deprecated_tools: Optional[List[str]] = None, tool_state: Optional[Union[str, "ToolState"]] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword type: Possible values include: "llm", "python", "action", "prompt", "custom_llm", "csharp". :paramtype type: str or ~flow.models.ToolType :keyword inputs: This is a dictionary. :paramtype inputs: dict[str, ~flow.models.InputDefinition] :keyword outputs: This is a dictionary. 
:paramtype outputs: dict[str, ~flow.models.OutputDefinition] :keyword description: :paramtype description: str :keyword connection_type: :paramtype connection_type: list[str or ~flow.models.ConnectionType] :keyword module: :paramtype module: str :keyword class_name: :paramtype class_name: str :keyword source: :paramtype source: str :keyword lkg_code: :paramtype lkg_code: str :keyword code: :paramtype code: str :keyword function: :paramtype function: str :keyword action_type: :paramtype action_type: str :keyword provider_config: This is a dictionary. :paramtype provider_config: dict[str, ~flow.models.InputDefinition] :keyword function_config: This is a dictionary. :paramtype function_config: dict[str, ~flow.models.InputDefinition] :keyword icon: Anything. :paramtype icon: any :keyword category: :paramtype category: str :keyword tags: A set of tags. This is a dictionary. :paramtype tags: dict[str, any] :keyword is_builtin: :paramtype is_builtin: bool :keyword package: :paramtype package: str :keyword package_version: :paramtype package_version: str :keyword default_prompt: :paramtype default_prompt: str :keyword enable_kwargs: :paramtype enable_kwargs: bool :keyword deprecated_tools: :paramtype deprecated_tools: list[str] :keyword tool_state: Possible values include: "Stable", "Preview", "Deprecated". 
        :paramtype tool_state: str or ~flow.models.ToolState
        """
        super(Tool, self).__init__(**kwargs)
        self.name = name
        self.type = type
        self.inputs = inputs
        self.outputs = outputs
        self.description = description
        self.connection_type = connection_type
        self.module = module
        self.class_name = class_name
        self.source = source
        self.lkg_code = lkg_code
        self.code = code
        self.function = function
        self.action_type = action_type
        self.provider_config = provider_config
        self.function_config = function_config
        self.icon = icon
        self.category = category
        self.tags = tags
        self.is_builtin = is_builtin
        self.package = package
        self.package_version = package_version
        self.default_prompt = default_prompt
        self.enable_kwargs = enable_kwargs
        self.deprecated_tools = deprecated_tools
        self.tool_state = tool_state


# Autorest-generated msrest model; manual edits are lost if the client is regenerated.
class ToolFuncResponse(msrest.serialization.Model):
    """ToolFuncResponse.

    :ivar result: Anything.
    :vartype result: any
    :ivar logs: This is a dictionary.
    :vartype logs: dict[str, str]
    """

    # 'object' wire type means msrest passes the value through untyped.
    _attribute_map = {
        'result': {'key': 'result', 'type': 'object'},
        'logs': {'key': 'logs', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        result: Optional[Any] = None,
        logs: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword result: Anything.
        :paramtype result: any
        :keyword logs: This is a dictionary.
        :paramtype logs: dict[str, str]
        """
        super(ToolFuncResponse, self).__init__(**kwargs)
        self.result = result
        self.logs = logs


# Autorest-generated msrest model; manual edits are lost if the client is regenerated.
class ToolInputDynamicList(msrest.serialization.Model):
    """ToolInputDynamicList.
:ivar func_path: :vartype func_path: str :ivar func_kwargs: :vartype func_kwargs: list[dict[str, any]] """ _attribute_map = { 'func_path': {'key': 'func_path', 'type': 'str'}, 'func_kwargs': {'key': 'func_kwargs', 'type': '[{object}]'}, } def __init__( self, *, func_path: Optional[str] = None, func_kwargs: Optional[List[Dict[str, Any]]] = None, **kwargs ): """ :keyword func_path: :paramtype func_path: str :keyword func_kwargs: :paramtype func_kwargs: list[dict[str, any]] """ super(ToolInputDynamicList, self).__init__(**kwargs) self.func_path = func_path self.func_kwargs = func_kwargs class ToolInputGeneratedBy(msrest.serialization.Model): """ToolInputGeneratedBy. :ivar func_path: :vartype func_path: str :ivar func_kwargs: :vartype func_kwargs: list[dict[str, any]] :ivar reverse_func_path: :vartype reverse_func_path: str """ _attribute_map = { 'func_path': {'key': 'func_path', 'type': 'str'}, 'func_kwargs': {'key': 'func_kwargs', 'type': '[{object}]'}, 'reverse_func_path': {'key': 'reverse_func_path', 'type': 'str'}, } def __init__( self, *, func_path: Optional[str] = None, func_kwargs: Optional[List[Dict[str, Any]]] = None, reverse_func_path: Optional[str] = None, **kwargs ): """ :keyword func_path: :paramtype func_path: str :keyword func_kwargs: :paramtype func_kwargs: list[dict[str, any]] :keyword reverse_func_path: :paramtype reverse_func_path: str """ super(ToolInputGeneratedBy, self).__init__(**kwargs) self.func_path = func_path self.func_kwargs = func_kwargs self.reverse_func_path = reverse_func_path class ToolMetaDto(msrest.serialization.Model): """ToolMetaDto. :ivar tools: This is a dictionary. :vartype tools: dict[str, ~flow.models.Tool] :ivar errors: This is a dictionary. 
    :vartype errors: dict[str, ~flow.models.ErrorResponse]
    """

    _attribute_map = {
        'tools': {'key': 'tools', 'type': '{Tool}'},
        'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
    }

    def __init__(
        self,
        *,
        tools: Optional[Dict[str, "Tool"]] = None,
        errors: Optional[Dict[str, "ErrorResponse"]] = None,
        **kwargs
    ):
        """
        :keyword tools: This is a dictionary.
        :paramtype tools: dict[str, ~flow.models.Tool]
        :keyword errors: This is a dictionary.
        :paramtype errors: dict[str, ~flow.models.ErrorResponse]
        """
        super(ToolMetaDto, self).__init__(**kwargs)
        self.tools = tools
        self.errors = errors


# Autorest-generated msrest model; manual edits are lost if the client is regenerated.
class ToolSetting(msrest.serialization.Model):
    """ToolSetting.

    :ivar providers:
    :vartype providers: list[~flow.models.ProviderEntity]
    """

    _attribute_map = {
        'providers': {'key': 'providers', 'type': '[ProviderEntity]'},
    }

    def __init__(
        self,
        *,
        providers: Optional[List["ProviderEntity"]] = None,
        **kwargs
    ):
        """
        :keyword providers:
        :paramtype providers: list[~flow.models.ProviderEntity]
        """
        super(ToolSetting, self).__init__(**kwargs)
        self.providers = providers


# Autorest-generated msrest model; manual edits are lost if the client is regenerated.
# NOTE: unlike most models here, the wire key is snake_case ('tool_type'), not
# camelCase — preserved exactly as generated.
class ToolSourceMeta(msrest.serialization.Model):
    """ToolSourceMeta.

    :ivar tool_type:
    :vartype tool_type: str
    """

    _attribute_map = {
        'tool_type': {'key': 'tool_type', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        tool_type: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword tool_type:
        :paramtype tool_type: str
        """
        super(ToolSourceMeta, self).__init__(**kwargs)
        self.tool_type = tool_type


# Autorest-generated msrest model; manual edits are lost if the client is regenerated.
class TorchDistributedConfiguration(msrest.serialization.Model):
    """TorchDistributedConfiguration.

    :ivar process_count_per_node:
    :vartype process_count_per_node: int
    """

    _attribute_map = {
        'process_count_per_node': {'key': 'processCountPerNode', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        process_count_per_node: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword process_count_per_node:
        :paramtype process_count_per_node: int
        """
        super(TorchDistributedConfiguration, self).__init__(**kwargs)
        self.process_count_per_node = process_count_per_node


# Autorest-generated msrest model; manual edits are lost if the client is regenerated.
class TrainingDiagnosticConfiguration(msrest.serialization.Model):
    """TrainingDiagnosticConfiguration.

    :ivar job_heart_beat_timeout_seconds:
    :vartype job_heart_beat_timeout_seconds: int
    """

    _attribute_map = {
        'job_heart_beat_timeout_seconds': {'key': 'jobHeartBeatTimeoutSeconds', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        job_heart_beat_timeout_seconds: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword job_heart_beat_timeout_seconds:
        :paramtype job_heart_beat_timeout_seconds: int
        """
        super(TrainingDiagnosticConfiguration, self).__init__(**kwargs)
        self.job_heart_beat_timeout_seconds = job_heart_beat_timeout_seconds


# Autorest-generated msrest model; manual edits are lost if the client is regenerated.
class TrainingOutput(msrest.serialization.Model):
    """TrainingOutput.

    :ivar training_output_type: Possible values include: "Metrics", "Model".
    :vartype training_output_type: str or ~flow.models.TrainingOutputType
    :ivar iteration:
    :vartype iteration: int
    :ivar metric:
    :vartype metric: str
    :ivar model_file:
    :vartype model_file: str
    """

    _attribute_map = {
        'training_output_type': {'key': 'trainingOutputType', 'type': 'str'},
        'iteration': {'key': 'iteration', 'type': 'int'},
        'metric': {'key': 'metric', 'type': 'str'},
        'model_file': {'key': 'modelFile', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        training_output_type: Optional[Union[str, "TrainingOutputType"]] = None,
        iteration: Optional[int] = None,
        metric: Optional[str] = None,
        model_file: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword training_output_type: Possible values include: "Metrics", "Model".
:paramtype training_output_type: str or ~flow.models.TrainingOutputType :keyword iteration: :paramtype iteration: int :keyword metric: :paramtype metric: str :keyword model_file: :paramtype model_file: str """ super(TrainingOutput, self).__init__(**kwargs) self.training_output_type = training_output_type self.iteration = iteration self.metric = metric self.model_file = model_file class TrainingSettings(msrest.serialization.Model): """TrainingSettings. :ivar block_list_models: :vartype block_list_models: list[str] :ivar allow_list_models: :vartype allow_list_models: list[str] :ivar enable_dnn_training: :vartype enable_dnn_training: bool :ivar enable_onnx_compatible_models: :vartype enable_onnx_compatible_models: bool :ivar stack_ensemble_settings: :vartype stack_ensemble_settings: ~flow.models.StackEnsembleSettings :ivar enable_stack_ensemble: :vartype enable_stack_ensemble: bool :ivar enable_vote_ensemble: :vartype enable_vote_ensemble: bool :ivar ensemble_model_download_timeout: :vartype ensemble_model_download_timeout: str :ivar enable_model_explainability: :vartype enable_model_explainability: bool :ivar training_mode: Possible values include: "Distributed", "NonDistributed", "Auto". 
:vartype training_mode: str or ~flow.models.TabularTrainingMode """ _attribute_map = { 'block_list_models': {'key': 'blockListModels', 'type': '[str]'}, 'allow_list_models': {'key': 'allowListModels', 'type': '[str]'}, 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'str'}, 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, 'training_mode': {'key': 'trainingMode', 'type': 'str'}, } def __init__( self, *, block_list_models: Optional[List[str]] = None, allow_list_models: Optional[List[str]] = None, enable_dnn_training: Optional[bool] = None, enable_onnx_compatible_models: Optional[bool] = None, stack_ensemble_settings: Optional["StackEnsembleSettings"] = None, enable_stack_ensemble: Optional[bool] = None, enable_vote_ensemble: Optional[bool] = None, ensemble_model_download_timeout: Optional[str] = None, enable_model_explainability: Optional[bool] = None, training_mode: Optional[Union[str, "TabularTrainingMode"]] = None, **kwargs ): """ :keyword block_list_models: :paramtype block_list_models: list[str] :keyword allow_list_models: :paramtype allow_list_models: list[str] :keyword enable_dnn_training: :paramtype enable_dnn_training: bool :keyword enable_onnx_compatible_models: :paramtype enable_onnx_compatible_models: bool :keyword stack_ensemble_settings: :paramtype stack_ensemble_settings: ~flow.models.StackEnsembleSettings :keyword enable_stack_ensemble: :paramtype enable_stack_ensemble: bool :keyword enable_vote_ensemble: :paramtype enable_vote_ensemble: bool :keyword ensemble_model_download_timeout: :paramtype 
ensemble_model_download_timeout: str :keyword enable_model_explainability: :paramtype enable_model_explainability: bool :keyword training_mode: Possible values include: "Distributed", "NonDistributed", "Auto". :paramtype training_mode: str or ~flow.models.TabularTrainingMode """ super(TrainingSettings, self).__init__(**kwargs) self.block_list_models = block_list_models self.allow_list_models = allow_list_models self.enable_dnn_training = enable_dnn_training self.enable_onnx_compatible_models = enable_onnx_compatible_models self.stack_ensemble_settings = stack_ensemble_settings self.enable_stack_ensemble = enable_stack_ensemble self.enable_vote_ensemble = enable_vote_ensemble self.ensemble_model_download_timeout = ensemble_model_download_timeout self.enable_model_explainability = enable_model_explainability self.training_mode = training_mode class TriggerAsyncOperationStatus(msrest.serialization.Model): """TriggerAsyncOperationStatus. :ivar id: :vartype id: str :ivar operation_type: Possible values include: "Create", "Update", "Delete", "CreateOrUpdate". :vartype operation_type: str or ~flow.models.TriggerOperationType :ivar provisioning_status: Possible values include: "Creating", "Updating", "Deleting", "Succeeded", "Failed", "Canceled". :vartype provisioning_status: str or ~flow.models.ScheduleProvisioningStatus :ivar created_time: :vartype created_time: ~datetime.datetime :ivar end_time: :vartype end_time: ~datetime.datetime :ivar error: The error response. 
:vartype error: ~flow.models.ErrorResponse :ivar status_code: Possible values include: "Continue", "SwitchingProtocols", "Processing", "EarlyHints", "OK", "Created", "Accepted", "NonAuthoritativeInformation", "NoContent", "ResetContent", "PartialContent", "MultiStatus", "AlreadyReported", "IMUsed", "MultipleChoices", "Ambiguous", "MovedPermanently", "Moved", "Found", "Redirect", "SeeOther", "RedirectMethod", "NotModified", "UseProxy", "Unused", "TemporaryRedirect", "RedirectKeepVerb", "PermanentRedirect", "BadRequest", "Unauthorized", "PaymentRequired", "Forbidden", "NotFound", "MethodNotAllowed", "NotAcceptable", "ProxyAuthenticationRequired", "RequestTimeout", "Conflict", "Gone", "LengthRequired", "PreconditionFailed", "RequestEntityTooLarge", "RequestUriTooLong", "UnsupportedMediaType", "RequestedRangeNotSatisfiable", "ExpectationFailed", "MisdirectedRequest", "UnprocessableEntity", "Locked", "FailedDependency", "UpgradeRequired", "PreconditionRequired", "TooManyRequests", "RequestHeaderFieldsTooLarge", "UnavailableForLegalReasons", "InternalServerError", "NotImplemented", "BadGateway", "ServiceUnavailable", "GatewayTimeout", "HttpVersionNotSupported", "VariantAlsoNegotiates", "InsufficientStorage", "LoopDetected", "NotExtended", "NetworkAuthenticationRequired". 
:vartype status_code: str or ~flow.models.HttpStatusCode """ _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'operation_type': {'key': 'operationType', 'type': 'str'}, 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, 'error': {'key': 'error', 'type': 'ErrorResponse'}, 'status_code': {'key': 'statusCode', 'type': 'str'}, } def __init__( self, *, id: Optional[str] = None, operation_type: Optional[Union[str, "TriggerOperationType"]] = None, provisioning_status: Optional[Union[str, "ScheduleProvisioningStatus"]] = None, created_time: Optional[datetime.datetime] = None, end_time: Optional[datetime.datetime] = None, error: Optional["ErrorResponse"] = None, status_code: Optional[Union[str, "HttpStatusCode"]] = None, **kwargs ): """ :keyword id: :paramtype id: str :keyword operation_type: Possible values include: "Create", "Update", "Delete", "CreateOrUpdate". :paramtype operation_type: str or ~flow.models.TriggerOperationType :keyword provisioning_status: Possible values include: "Creating", "Updating", "Deleting", "Succeeded", "Failed", "Canceled". :paramtype provisioning_status: str or ~flow.models.ScheduleProvisioningStatus :keyword created_time: :paramtype created_time: ~datetime.datetime :keyword end_time: :paramtype end_time: ~datetime.datetime :keyword error: The error response. 
:paramtype error: ~flow.models.ErrorResponse :keyword status_code: Possible values include: "Continue", "SwitchingProtocols", "Processing", "EarlyHints", "OK", "Created", "Accepted", "NonAuthoritativeInformation", "NoContent", "ResetContent", "PartialContent", "MultiStatus", "AlreadyReported", "IMUsed", "MultipleChoices", "Ambiguous", "MovedPermanently", "Moved", "Found", "Redirect", "SeeOther", "RedirectMethod", "NotModified", "UseProxy", "Unused", "TemporaryRedirect", "RedirectKeepVerb", "PermanentRedirect", "BadRequest", "Unauthorized", "PaymentRequired", "Forbidden", "NotFound", "MethodNotAllowed", "NotAcceptable", "ProxyAuthenticationRequired", "RequestTimeout", "Conflict", "Gone", "LengthRequired", "PreconditionFailed", "RequestEntityTooLarge", "RequestUriTooLong", "UnsupportedMediaType", "RequestedRangeNotSatisfiable", "ExpectationFailed", "MisdirectedRequest", "UnprocessableEntity", "Locked", "FailedDependency", "UpgradeRequired", "PreconditionRequired", "TooManyRequests", "RequestHeaderFieldsTooLarge", "UnavailableForLegalReasons", "InternalServerError", "NotImplemented", "BadGateway", "ServiceUnavailable", "GatewayTimeout", "HttpVersionNotSupported", "VariantAlsoNegotiates", "InsufficientStorage", "LoopDetected", "NotExtended", "NetworkAuthenticationRequired". :paramtype status_code: str or ~flow.models.HttpStatusCode """ super(TriggerAsyncOperationStatus, self).__init__(**kwargs) self.id = id self.operation_type = operation_type self.provisioning_status = provisioning_status self.created_time = created_time self.end_time = end_time self.error = error self.status_code = status_code class TuningNodeSetting(msrest.serialization.Model): """TuningNodeSetting. 
:ivar variant_ids: :vartype variant_ids: list[str] """ _attribute_map = { 'variant_ids': {'key': 'variantIds', 'type': '[str]'}, } def __init__( self, *, variant_ids: Optional[List[str]] = None, **kwargs ): """ :keyword variant_ids: :paramtype variant_ids: list[str] """ super(TuningNodeSetting, self).__init__(**kwargs) self.variant_ids = variant_ids class TypedAssetReference(msrest.serialization.Model): """TypedAssetReference. :ivar asset_id: :vartype asset_id: str :ivar type: :vartype type: str """ _attribute_map = { 'asset_id': {'key': 'assetId', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__( self, *, asset_id: Optional[str] = None, type: Optional[str] = None, **kwargs ): """ :keyword asset_id: :paramtype asset_id: str :keyword type: :paramtype type: str """ super(TypedAssetReference, self).__init__(**kwargs) self.asset_id = asset_id self.type = type class UIAzureOpenAIDeploymentNameSelector(msrest.serialization.Model): """UIAzureOpenAIDeploymentNameSelector. :ivar capabilities: :vartype capabilities: ~flow.models.UIAzureOpenAIModelCapabilities """ _attribute_map = { 'capabilities': {'key': 'Capabilities', 'type': 'UIAzureOpenAIModelCapabilities'}, } def __init__( self, *, capabilities: Optional["UIAzureOpenAIModelCapabilities"] = None, **kwargs ): """ :keyword capabilities: :paramtype capabilities: ~flow.models.UIAzureOpenAIModelCapabilities """ super(UIAzureOpenAIDeploymentNameSelector, self).__init__(**kwargs) self.capabilities = capabilities class UIAzureOpenAIModelCapabilities(msrest.serialization.Model): """UIAzureOpenAIModelCapabilities. 
:ivar completion: :vartype completion: bool :ivar chat_completion: :vartype chat_completion: bool :ivar embeddings: :vartype embeddings: bool """ _attribute_map = { 'completion': {'key': 'Completion', 'type': 'bool'}, 'chat_completion': {'key': 'ChatCompletion', 'type': 'bool'}, 'embeddings': {'key': 'Embeddings', 'type': 'bool'}, } def __init__( self, *, completion: Optional[bool] = None, chat_completion: Optional[bool] = None, embeddings: Optional[bool] = None, **kwargs ): """ :keyword completion: :paramtype completion: bool :keyword chat_completion: :paramtype chat_completion: bool :keyword embeddings: :paramtype embeddings: bool """ super(UIAzureOpenAIModelCapabilities, self).__init__(**kwargs) self.completion = completion self.chat_completion = chat_completion self.embeddings = embeddings class UIColumnPicker(msrest.serialization.Model): """UIColumnPicker. :ivar column_picker_for: :vartype column_picker_for: str :ivar column_selection_categories: :vartype column_selection_categories: list[str] :ivar single_column_selection: :vartype single_column_selection: bool """ _attribute_map = { 'column_picker_for': {'key': 'columnPickerFor', 'type': 'str'}, 'column_selection_categories': {'key': 'columnSelectionCategories', 'type': '[str]'}, 'single_column_selection': {'key': 'singleColumnSelection', 'type': 'bool'}, } def __init__( self, *, column_picker_for: Optional[str] = None, column_selection_categories: Optional[List[str]] = None, single_column_selection: Optional[bool] = None, **kwargs ): """ :keyword column_picker_for: :paramtype column_picker_for: str :keyword column_selection_categories: :paramtype column_selection_categories: list[str] :keyword single_column_selection: :paramtype single_column_selection: bool """ super(UIColumnPicker, self).__init__(**kwargs) self.column_picker_for = column_picker_for self.column_selection_categories = column_selection_categories self.single_column_selection = single_column_selection class 
UIComputeSelection(msrest.serialization.Model): """UIComputeSelection. :ivar compute_types: :vartype compute_types: list[str] :ivar require_gpu: :vartype require_gpu: bool :ivar os_types: :vartype os_types: list[str] :ivar support_serverless: :vartype support_serverless: bool :ivar compute_run_settings_mapping: Dictionary of <componentsΒ·10my8ojΒ·schemasΒ·uicomputeselectionΒ·propertiesΒ·computerunsettingsmappingΒ·additionalproperties>. :vartype compute_run_settings_mapping: dict[str, list[~flow.models.RunSettingParameter]] """ _attribute_map = { 'compute_types': {'key': 'computeTypes', 'type': '[str]'}, 'require_gpu': {'key': 'requireGpu', 'type': 'bool'}, 'os_types': {'key': 'osTypes', 'type': '[str]'}, 'support_serverless': {'key': 'supportServerless', 'type': 'bool'}, 'compute_run_settings_mapping': {'key': 'computeRunSettingsMapping', 'type': '{[RunSettingParameter]}'}, } def __init__( self, *, compute_types: Optional[List[str]] = None, require_gpu: Optional[bool] = None, os_types: Optional[List[str]] = None, support_serverless: Optional[bool] = None, compute_run_settings_mapping: Optional[Dict[str, List["RunSettingParameter"]]] = None, **kwargs ): """ :keyword compute_types: :paramtype compute_types: list[str] :keyword require_gpu: :paramtype require_gpu: bool :keyword os_types: :paramtype os_types: list[str] :keyword support_serverless: :paramtype support_serverless: bool :keyword compute_run_settings_mapping: Dictionary of <componentsΒ·10my8ojΒ·schemasΒ·uicomputeselectionΒ·propertiesΒ·computerunsettingsmappingΒ·additionalproperties>. 
:paramtype compute_run_settings_mapping: dict[str, list[~flow.models.RunSettingParameter]] """ super(UIComputeSelection, self).__init__(**kwargs) self.compute_types = compute_types self.require_gpu = require_gpu self.os_types = os_types self.support_serverless = support_serverless self.compute_run_settings_mapping = compute_run_settings_mapping class UIHyperparameterConfiguration(msrest.serialization.Model): """UIHyperparameterConfiguration. :ivar model_name_to_hyper_parameter_and_distribution_mapping: Dictionary of <componentsΒ·1nrp69tΒ·schemasΒ·uihyperparameterconfigurationΒ·propertiesΒ·modelnametohyperparameteranddistributionmappingΒ·additionalproperties>. :vartype model_name_to_hyper_parameter_and_distribution_mapping: dict[str, dict[str, list[str]]] :ivar distribution_parameters_mapping: Dictionary of <componentsΒ·d9plq4Β·schemasΒ·uihyperparameterconfigurationΒ·propertiesΒ·distributionparametersmappingΒ·additionalproperties>. :vartype distribution_parameters_mapping: dict[str, list[~flow.models.DistributionParameter]] :ivar json_schema: :vartype json_schema: str """ _attribute_map = { 'model_name_to_hyper_parameter_and_distribution_mapping': {'key': 'modelNameToHyperParameterAndDistributionMapping', 'type': '{{[str]}}'}, 'distribution_parameters_mapping': {'key': 'distributionParametersMapping', 'type': '{[DistributionParameter]}'}, 'json_schema': {'key': 'jsonSchema', 'type': 'str'}, } def __init__( self, *, model_name_to_hyper_parameter_and_distribution_mapping: Optional[Dict[str, Dict[str, List[str]]]] = None, distribution_parameters_mapping: Optional[Dict[str, List["DistributionParameter"]]] = None, json_schema: Optional[str] = None, **kwargs ): """ :keyword model_name_to_hyper_parameter_and_distribution_mapping: Dictionary of <componentsΒ·1nrp69tΒ·schemasΒ·uihyperparameterconfigurationΒ·propertiesΒ·modelnametohyperparameteranddistributionmappingΒ·additionalproperties>. 
:paramtype model_name_to_hyper_parameter_and_distribution_mapping: dict[str, dict[str, list[str]]] :keyword distribution_parameters_mapping: Dictionary of <componentsΒ·d9plq4Β·schemasΒ·uihyperparameterconfigurationΒ·propertiesΒ·distributionparametersmappingΒ·additionalproperties>. :paramtype distribution_parameters_mapping: dict[str, list[~flow.models.DistributionParameter]] :keyword json_schema: :paramtype json_schema: str """ super(UIHyperparameterConfiguration, self).__init__(**kwargs) self.model_name_to_hyper_parameter_and_distribution_mapping = model_name_to_hyper_parameter_and_distribution_mapping self.distribution_parameters_mapping = distribution_parameters_mapping self.json_schema = json_schema class UIInputSetting(msrest.serialization.Model): """UIInputSetting. :ivar name: :vartype name: str :ivar data_delivery_mode: Possible values include: "Read-only mount", "Read-write mount", "Download", "Direct", "Evaluate mount", "Evaluate download", "Hdfs". :vartype data_delivery_mode: str or ~flow.models.UIInputDataDeliveryMode :ivar path_on_compute: :vartype path_on_compute: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'data_delivery_mode': {'key': 'dataDeliveryMode', 'type': 'str'}, 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, data_delivery_mode: Optional[Union[str, "UIInputDataDeliveryMode"]] = None, path_on_compute: Optional[str] = None, **kwargs ): """ :keyword name: :paramtype name: str :keyword data_delivery_mode: Possible values include: "Read-only mount", "Read-write mount", "Download", "Direct", "Evaluate mount", "Evaluate download", "Hdfs". 
:paramtype data_delivery_mode: str or ~flow.models.UIInputDataDeliveryMode :keyword path_on_compute: :paramtype path_on_compute: str """ super(UIInputSetting, self).__init__(**kwargs) self.name = name self.data_delivery_mode = data_delivery_mode self.path_on_compute = path_on_compute class UIJsonEditor(msrest.serialization.Model): """UIJsonEditor. :ivar json_schema: :vartype json_schema: str """ _attribute_map = { 'json_schema': {'key': 'jsonSchema', 'type': 'str'}, } def __init__( self, *, json_schema: Optional[str] = None, **kwargs ): """ :keyword json_schema: :paramtype json_schema: str """ super(UIJsonEditor, self).__init__(**kwargs) self.json_schema = json_schema class UIParameterHint(msrest.serialization.Model): """UIParameterHint. :ivar ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection". :vartype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum :ivar column_picker: :vartype column_picker: ~flow.models.UIColumnPicker :ivar ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql". 
:vartype ui_script_language: str or ~flow.models.UIScriptLanguageEnum :ivar json_editor: :vartype json_editor: ~flow.models.UIJsonEditor :ivar prompt_flow_connection_selector: :vartype prompt_flow_connection_selector: ~flow.models.UIPromptFlowConnectionSelector :ivar azure_open_ai_deployment_name_selector: :vartype azure_open_ai_deployment_name_selector: ~flow.models.UIAzureOpenAIDeploymentNameSelector :ivar ux_ignore: :vartype ux_ignore: bool :ivar anonymous: :vartype anonymous: bool """ _attribute_map = { 'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'}, 'column_picker': {'key': 'columnPicker', 'type': 'UIColumnPicker'}, 'ui_script_language': {'key': 'uiScriptLanguage', 'type': 'str'}, 'json_editor': {'key': 'jsonEditor', 'type': 'UIJsonEditor'}, 'prompt_flow_connection_selector': {'key': 'PromptFlowConnectionSelector', 'type': 'UIPromptFlowConnectionSelector'}, 'azure_open_ai_deployment_name_selector': {'key': 'AzureOpenAIDeploymentNameSelector', 'type': 'UIAzureOpenAIDeploymentNameSelector'}, 'ux_ignore': {'key': 'UxIgnore', 'type': 'bool'}, 'anonymous': {'key': 'Anonymous', 'type': 'bool'}, } def __init__( self, *, ui_widget_type: Optional[Union[str, "UIWidgetTypeEnum"]] = None, column_picker: Optional["UIColumnPicker"] = None, ui_script_language: Optional[Union[str, "UIScriptLanguageEnum"]] = None, json_editor: Optional["UIJsonEditor"] = None, prompt_flow_connection_selector: Optional["UIPromptFlowConnectionSelector"] = None, azure_open_ai_deployment_name_selector: Optional["UIAzureOpenAIDeploymentNameSelector"] = None, ux_ignore: Optional[bool] = None, anonymous: Optional[bool] = None, **kwargs ): """ :keyword ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection", 
"AzureOpenAIDeploymentNameSelection". :paramtype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum :keyword column_picker: :paramtype column_picker: ~flow.models.UIColumnPicker :keyword ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql". :paramtype ui_script_language: str or ~flow.models.UIScriptLanguageEnum :keyword json_editor: :paramtype json_editor: ~flow.models.UIJsonEditor :keyword prompt_flow_connection_selector: :paramtype prompt_flow_connection_selector: ~flow.models.UIPromptFlowConnectionSelector :keyword azure_open_ai_deployment_name_selector: :paramtype azure_open_ai_deployment_name_selector: ~flow.models.UIAzureOpenAIDeploymentNameSelector :keyword ux_ignore: :paramtype ux_ignore: bool :keyword anonymous: :paramtype anonymous: bool """ super(UIParameterHint, self).__init__(**kwargs) self.ui_widget_type = ui_widget_type self.column_picker = column_picker self.ui_script_language = ui_script_language self.json_editor = json_editor self.prompt_flow_connection_selector = prompt_flow_connection_selector self.azure_open_ai_deployment_name_selector = azure_open_ai_deployment_name_selector self.ux_ignore = ux_ignore self.anonymous = anonymous class UIPromptFlowConnectionSelector(msrest.serialization.Model): """UIPromptFlowConnectionSelector. :ivar prompt_flow_connection_type: :vartype prompt_flow_connection_type: str """ _attribute_map = { 'prompt_flow_connection_type': {'key': 'PromptFlowConnectionType', 'type': 'str'}, } def __init__( self, *, prompt_flow_connection_type: Optional[str] = None, **kwargs ): """ :keyword prompt_flow_connection_type: :paramtype prompt_flow_connection_type: str """ super(UIPromptFlowConnectionSelector, self).__init__(**kwargs) self.prompt_flow_connection_type = prompt_flow_connection_type class UIWidgetMetaInfo(msrest.serialization.Model): """UIWidgetMetaInfo. 
:ivar module_node_id: :vartype module_node_id: str :ivar meta_module_id: :vartype meta_module_id: str :ivar parameter_name: :vartype parameter_name: str :ivar ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection". :vartype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum """ _attribute_map = { 'module_node_id': {'key': 'moduleNodeId', 'type': 'str'}, 'meta_module_id': {'key': 'metaModuleId', 'type': 'str'}, 'parameter_name': {'key': 'parameterName', 'type': 'str'}, 'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'}, } def __init__( self, *, module_node_id: Optional[str] = None, meta_module_id: Optional[str] = None, parameter_name: Optional[str] = None, ui_widget_type: Optional[Union[str, "UIWidgetTypeEnum"]] = None, **kwargs ): """ :keyword module_node_id: :paramtype module_node_id: str :keyword meta_module_id: :paramtype meta_module_id: str :keyword parameter_name: :paramtype parameter_name: str :keyword ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection". :paramtype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum """ super(UIWidgetMetaInfo, self).__init__(**kwargs) self.module_node_id = module_node_id self.meta_module_id = meta_module_id self.parameter_name = parameter_name self.ui_widget_type = ui_widget_type class UIYamlEditor(msrest.serialization.Model): """UIYamlEditor. 
:ivar json_schema: :vartype json_schema: str """ _attribute_map = { 'json_schema': {'key': 'jsonSchema', 'type': 'str'}, } def __init__( self, *, json_schema: Optional[str] = None, **kwargs ): """ :keyword json_schema: :paramtype json_schema: str """ super(UIYamlEditor, self).__init__(**kwargs) self.json_schema = json_schema class UnversionedEntityRequestDto(msrest.serialization.Model): """UnversionedEntityRequestDto. :ivar unversioned_entity_ids: :vartype unversioned_entity_ids: list[str] """ _attribute_map = { 'unversioned_entity_ids': {'key': 'unversionedEntityIds', 'type': '[str]'}, } def __init__( self, *, unversioned_entity_ids: Optional[List[str]] = None, **kwargs ): """ :keyword unversioned_entity_ids: :paramtype unversioned_entity_ids: list[str] """ super(UnversionedEntityRequestDto, self).__init__(**kwargs) self.unversioned_entity_ids = unversioned_entity_ids class UnversionedEntityResponseDto(msrest.serialization.Model): """UnversionedEntityResponseDto. :ivar unversioned_entities: :vartype unversioned_entities: list[~flow.models.FlowIndexEntity] :ivar unversioned_entity_json_schema: Anything. 
:vartype unversioned_entity_json_schema: any :ivar normalized_request_charge: :vartype normalized_request_charge: float :ivar normalized_request_charge_period: :vartype normalized_request_charge_period: str """ _attribute_map = { 'unversioned_entities': {'key': 'unversionedEntities', 'type': '[FlowIndexEntity]'}, 'unversioned_entity_json_schema': {'key': 'unversionedEntityJsonSchema', 'type': 'object'}, 'normalized_request_charge': {'key': 'normalizedRequestCharge', 'type': 'float'}, 'normalized_request_charge_period': {'key': 'normalizedRequestChargePeriod', 'type': 'str'}, } def __init__( self, *, unversioned_entities: Optional[List["FlowIndexEntity"]] = None, unversioned_entity_json_schema: Optional[Any] = None, normalized_request_charge: Optional[float] = None, normalized_request_charge_period: Optional[str] = None, **kwargs ): """ :keyword unversioned_entities: :paramtype unversioned_entities: list[~flow.models.FlowIndexEntity] :keyword unversioned_entity_json_schema: Anything. :paramtype unversioned_entity_json_schema: any :keyword normalized_request_charge: :paramtype normalized_request_charge: float :keyword normalized_request_charge_period: :paramtype normalized_request_charge_period: str """ super(UnversionedEntityResponseDto, self).__init__(**kwargs) self.unversioned_entities = unversioned_entities self.unversioned_entity_json_schema = unversioned_entity_json_schema self.normalized_request_charge = normalized_request_charge self.normalized_request_charge_period = normalized_request_charge_period class UnversionedRebuildIndexDto(msrest.serialization.Model): """UnversionedRebuildIndexDto. 
:ivar continuation_token: :vartype continuation_token: str :ivar entity_count: :vartype entity_count: int :ivar entity_container_type: :vartype entity_container_type: str :ivar entity_type: :vartype entity_type: str :ivar resource_id: :vartype resource_id: str :ivar workspace_id: :vartype workspace_id: str :ivar immutable_resource_id: :vartype immutable_resource_id: str :ivar start_time: :vartype start_time: ~datetime.datetime :ivar end_time: :vartype end_time: ~datetime.datetime """ _attribute_map = { 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, 'entity_count': {'key': 'entityCount', 'type': 'int'}, 'entity_container_type': {'key': 'entityContainerType', 'type': 'str'}, 'entity_type': {'key': 'entityType', 'type': 'str'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, 'workspace_id': {'key': 'workspaceId', 'type': 'str'}, 'immutable_resource_id': {'key': 'immutableResourceId', 'type': 'str'}, 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, } def __init__( self, *, continuation_token: Optional[str] = None, entity_count: Optional[int] = None, entity_container_type: Optional[str] = None, entity_type: Optional[str] = None, resource_id: Optional[str] = None, workspace_id: Optional[str] = None, immutable_resource_id: Optional[str] = None, start_time: Optional[datetime.datetime] = None, end_time: Optional[datetime.datetime] = None, **kwargs ): """ :keyword continuation_token: :paramtype continuation_token: str :keyword entity_count: :paramtype entity_count: int :keyword entity_container_type: :paramtype entity_container_type: str :keyword entity_type: :paramtype entity_type: str :keyword resource_id: :paramtype resource_id: str :keyword workspace_id: :paramtype workspace_id: str :keyword immutable_resource_id: :paramtype immutable_resource_id: str :keyword start_time: :paramtype start_time: ~datetime.datetime :keyword end_time: :paramtype end_time: ~datetime.datetime """ 
super(UnversionedRebuildIndexDto, self).__init__(**kwargs) self.continuation_token = continuation_token self.entity_count = entity_count self.entity_container_type = entity_container_type self.entity_type = entity_type self.resource_id = resource_id self.workspace_id = workspace_id self.immutable_resource_id = immutable_resource_id self.start_time = start_time self.end_time = end_time class UnversionedRebuildResponseDto(msrest.serialization.Model): """UnversionedRebuildResponseDto. :ivar entities: :vartype entities: ~flow.models.SegmentedResult1 :ivar unversioned_entity_schema: Anything. :vartype unversioned_entity_schema: any :ivar normalized_request_charge: :vartype normalized_request_charge: float :ivar normalized_request_charge_period: :vartype normalized_request_charge_period: str """ _attribute_map = { 'entities': {'key': 'entities', 'type': 'SegmentedResult1'}, 'unversioned_entity_schema': {'key': 'unversionedEntitySchema', 'type': 'object'}, 'normalized_request_charge': {'key': 'normalizedRequestCharge', 'type': 'float'}, 'normalized_request_charge_period': {'key': 'normalizedRequestChargePeriod', 'type': 'str'}, } def __init__( self, *, entities: Optional["SegmentedResult1"] = None, unversioned_entity_schema: Optional[Any] = None, normalized_request_charge: Optional[float] = None, normalized_request_charge_period: Optional[str] = None, **kwargs ): """ :keyword entities: :paramtype entities: ~flow.models.SegmentedResult1 :keyword unversioned_entity_schema: Anything. 
:paramtype unversioned_entity_schema: any :keyword normalized_request_charge: :paramtype normalized_request_charge: float :keyword normalized_request_charge_period: :paramtype normalized_request_charge_period: str """ super(UnversionedRebuildResponseDto, self).__init__(**kwargs) self.entities = entities self.unversioned_entity_schema = unversioned_entity_schema self.normalized_request_charge = normalized_request_charge self.normalized_request_charge_period = normalized_request_charge_period class UpdateComponentRequest(msrest.serialization.Model): """UpdateComponentRequest. :ivar display_name: :vartype display_name: str :ivar description: :vartype description: str :ivar tags: A set of tags. Dictionary of :code:`<string>`. :vartype tags: dict[str, str] :ivar module_update_operation_type: Possible values include: "SetDefaultVersion", "EnableModule", "DisableModule", "UpdateDisplayName", "UpdateDescription", "UpdateTags". :vartype module_update_operation_type: str or ~flow.models.ModuleUpdateOperationType :ivar module_version: :vartype module_version: str """ _attribute_map = { 'display_name': {'key': 'displayName', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'module_update_operation_type': {'key': 'moduleUpdateOperationType', 'type': 'str'}, 'module_version': {'key': 'moduleVersion', 'type': 'str'}, } def __init__( self, *, display_name: Optional[str] = None, description: Optional[str] = None, tags: Optional[Dict[str, str]] = None, module_update_operation_type: Optional[Union[str, "ModuleUpdateOperationType"]] = None, module_version: Optional[str] = None, **kwargs ): """ :keyword display_name: :paramtype display_name: str :keyword description: :paramtype description: str :keyword tags: A set of tags. Dictionary of :code:`<string>`. 
:paramtype tags: dict[str, str] :keyword module_update_operation_type: Possible values include: "SetDefaultVersion", "EnableModule", "DisableModule", "UpdateDisplayName", "UpdateDescription", "UpdateTags". :paramtype module_update_operation_type: str or ~flow.models.ModuleUpdateOperationType :keyword module_version: :paramtype module_version: str """ super(UpdateComponentRequest, self).__init__(**kwargs) self.display_name = display_name self.description = description self.tags = tags self.module_update_operation_type = module_update_operation_type self.module_version = module_version class UpdateFlowRequest(msrest.serialization.Model): """UpdateFlowRequest. :ivar flow_run_result: :vartype flow_run_result: ~flow.models.FlowRunResult :ivar flow_test_mode: Possible values include: "Sync", "Async". :vartype flow_test_mode: str or ~flow.models.FlowTestMode :ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`. :vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo] :ivar flow_name: :vartype flow_name: str :ivar description: :vartype description: str :ivar details: :vartype details: str :ivar tags: A set of tags. Dictionary of :code:`<string>`. :vartype tags: dict[str, str] :ivar flow: :vartype flow: ~flow.models.Flow :ivar flow_definition_file_path: :vartype flow_definition_file_path: str :ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". 
:vartype flow_type: str or ~flow.models.FlowType :ivar flow_run_settings: :vartype flow_run_settings: ~flow.models.FlowRunSettings :ivar is_archived: :vartype is_archived: bool :ivar vm_size: :vartype vm_size: str :ivar max_idle_time_seconds: :vartype max_idle_time_seconds: long :ivar identity: :vartype identity: str """ _attribute_map = { 'flow_run_result': {'key': 'flowRunResult', 'type': 'FlowRunResult'}, 'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'}, 'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'}, 'flow_name': {'key': 'flowName', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'details': {'key': 'details', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'flow': {'key': 'flow', 'type': 'Flow'}, 'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'}, 'flow_type': {'key': 'flowType', 'type': 'str'}, 'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'}, 'is_archived': {'key': 'isArchived', 'type': 'bool'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'}, 'identity': {'key': 'identity', 'type': 'str'}, } def __init__( self, *, flow_run_result: Optional["FlowRunResult"] = None, flow_test_mode: Optional[Union[str, "FlowTestMode"]] = None, flow_test_infos: Optional[Dict[str, "FlowTestInfo"]] = None, flow_name: Optional[str] = None, description: Optional[str] = None, details: Optional[str] = None, tags: Optional[Dict[str, str]] = None, flow: Optional["Flow"] = None, flow_definition_file_path: Optional[str] = None, flow_type: Optional[Union[str, "FlowType"]] = None, flow_run_settings: Optional["FlowRunSettings"] = None, is_archived: Optional[bool] = None, vm_size: Optional[str] = None, max_idle_time_seconds: Optional[int] = None, identity: Optional[str] = None, **kwargs ): """ :keyword flow_run_result: :paramtype flow_run_result: ~flow.models.FlowRunResult :keyword flow_test_mode: Possible 
values include: "Sync", "Async". :paramtype flow_test_mode: str or ~flow.models.FlowTestMode :keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`. :paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo] :keyword flow_name: :paramtype flow_name: str :keyword description: :paramtype description: str :keyword details: :paramtype details: str :keyword tags: A set of tags. Dictionary of :code:`<string>`. :paramtype tags: dict[str, str] :keyword flow: :paramtype flow: ~flow.models.Flow :keyword flow_definition_file_path: :paramtype flow_definition_file_path: str :keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag". :paramtype flow_type: str or ~flow.models.FlowType :keyword flow_run_settings: :paramtype flow_run_settings: ~flow.models.FlowRunSettings :keyword is_archived: :paramtype is_archived: bool :keyword vm_size: :paramtype vm_size: str :keyword max_idle_time_seconds: :paramtype max_idle_time_seconds: long :keyword identity: :paramtype identity: str """ super(UpdateFlowRequest, self).__init__(**kwargs) self.flow_run_result = flow_run_result self.flow_test_mode = flow_test_mode self.flow_test_infos = flow_test_infos self.flow_name = flow_name self.description = description self.details = details self.tags = tags self.flow = flow self.flow_definition_file_path = flow_definition_file_path self.flow_type = flow_type self.flow_run_settings = flow_run_settings self.is_archived = is_archived self.vm_size = vm_size self.max_idle_time_seconds = max_idle_time_seconds self.identity = identity class UpdateFlowRuntimeRequest(msrest.serialization.Model): """UpdateFlowRuntimeRequest. 
:ivar runtime_description: :vartype runtime_description: str :ivar environment: :vartype environment: str :ivar instance_count: :vartype instance_count: int """ _attribute_map = { 'runtime_description': {'key': 'runtimeDescription', 'type': 'str'}, 'environment': {'key': 'environment', 'type': 'str'}, 'instance_count': {'key': 'instanceCount', 'type': 'int'}, } def __init__( self, *, runtime_description: Optional[str] = None, environment: Optional[str] = None, instance_count: Optional[int] = None, **kwargs ): """ :keyword runtime_description: :paramtype runtime_description: str :keyword environment: :paramtype environment: str :keyword instance_count: :paramtype instance_count: int """ super(UpdateFlowRuntimeRequest, self).__init__(**kwargs) self.runtime_description = runtime_description self.environment = environment self.instance_count = instance_count class UpdateRegistryComponentRequest(msrest.serialization.Model): """UpdateRegistryComponentRequest. :ivar registry_name: :vartype registry_name: str :ivar component_name: :vartype component_name: str :ivar component_version: :vartype component_version: str :ivar update_type: The only acceptable values to pass in are None and "SetDefaultVersion". The default value is None. :vartype update_type: str """ _attribute_map = { 'registry_name': {'key': 'registryName', 'type': 'str'}, 'component_name': {'key': 'componentName', 'type': 'str'}, 'component_version': {'key': 'componentVersion', 'type': 'str'}, 'update_type': {'key': 'updateType', 'type': 'str'}, } def __init__( self, *, registry_name: Optional[str] = None, component_name: Optional[str] = None, component_version: Optional[str] = None, update_type: Optional[str] = None, **kwargs ): """ :keyword registry_name: :paramtype registry_name: str :keyword component_name: :paramtype component_name: str :keyword component_version: :paramtype component_version: str :keyword update_type: The only acceptable values to pass in are None and "SetDefaultVersion". 
The default value is None. :paramtype update_type: str """ super(UpdateRegistryComponentRequest, self).__init__(**kwargs) self.registry_name = registry_name self.component_name = component_name self.component_version = component_version self.update_type = update_type class UploadOptions(msrest.serialization.Model): """UploadOptions. :ivar overwrite: :vartype overwrite: bool :ivar source_globs: :vartype source_globs: ~flow.models.ExecutionGlobsOptions """ _attribute_map = { 'overwrite': {'key': 'overwrite', 'type': 'bool'}, 'source_globs': {'key': 'sourceGlobs', 'type': 'ExecutionGlobsOptions'}, } def __init__( self, *, overwrite: Optional[bool] = None, source_globs: Optional["ExecutionGlobsOptions"] = None, **kwargs ): """ :keyword overwrite: :paramtype overwrite: bool :keyword source_globs: :paramtype source_globs: ~flow.models.ExecutionGlobsOptions """ super(UploadOptions, self).__init__(**kwargs) self.overwrite = overwrite self.source_globs = source_globs class UriReference(msrest.serialization.Model): """UriReference. :ivar path: :vartype path: str :ivar is_file: :vartype is_file: bool """ _attribute_map = { 'path': {'key': 'path', 'type': 'str'}, 'is_file': {'key': 'isFile', 'type': 'bool'}, } def __init__( self, *, path: Optional[str] = None, is_file: Optional[bool] = None, **kwargs ): """ :keyword path: :paramtype path: str :keyword is_file: :paramtype is_file: bool """ super(UriReference, self).__init__(**kwargs) self.path = path self.is_file = is_file class User(msrest.serialization.Model): """User. :ivar user_object_id: A user or service principal's object ID. This is EUPI and may only be logged to warm path telemetry. :vartype user_object_id: str :ivar user_pu_id: A user or service principal's PuID. This is PII and should never be logged. :vartype user_pu_id: str :ivar user_idp: A user identity provider. Eg live.com This is PII and should never be logged. :vartype user_idp: str :ivar user_alt_sec_id: A user alternate sec id. 
This represents the user in a different identity provider system Eg.1:live.com:puid This is PII and should never be logged. :vartype user_alt_sec_id: str :ivar user_iss: The issuer which issed the token for this user. This is PII and should never be logged. :vartype user_iss: str :ivar user_tenant_id: A user or service principal's tenant ID. :vartype user_tenant_id: str :ivar user_name: A user's full name or a service principal's app ID. This is PII and should never be logged. :vartype user_name: str :ivar upn: A user's Principal name (upn) This is PII andshould never be logged. :vartype upn: str """ _attribute_map = { 'user_object_id': {'key': 'userObjectId', 'type': 'str'}, 'user_pu_id': {'key': 'userPuId', 'type': 'str'}, 'user_idp': {'key': 'userIdp', 'type': 'str'}, 'user_alt_sec_id': {'key': 'userAltSecId', 'type': 'str'}, 'user_iss': {'key': 'userIss', 'type': 'str'}, 'user_tenant_id': {'key': 'userTenantId', 'type': 'str'}, 'user_name': {'key': 'userName', 'type': 'str'}, 'upn': {'key': 'upn', 'type': 'str'}, } def __init__( self, *, user_object_id: Optional[str] = None, user_pu_id: Optional[str] = None, user_idp: Optional[str] = None, user_alt_sec_id: Optional[str] = None, user_iss: Optional[str] = None, user_tenant_id: Optional[str] = None, user_name: Optional[str] = None, upn: Optional[str] = None, **kwargs ): """ :keyword user_object_id: A user or service principal's object ID. This is EUPI and may only be logged to warm path telemetry. :paramtype user_object_id: str :keyword user_pu_id: A user or service principal's PuID. This is PII and should never be logged. :paramtype user_pu_id: str :keyword user_idp: A user identity provider. Eg live.com This is PII and should never be logged. :paramtype user_idp: str :keyword user_alt_sec_id: A user alternate sec id. This represents the user in a different identity provider system Eg.1:live.com:puid This is PII and should never be logged. 
:paramtype user_alt_sec_id: str :keyword user_iss: The issuer which issed the token for this user. This is PII and should never be logged. :paramtype user_iss: str :keyword user_tenant_id: A user or service principal's tenant ID. :paramtype user_tenant_id: str :keyword user_name: A user's full name or a service principal's app ID. This is PII and should never be logged. :paramtype user_name: str :keyword upn: A user's Principal name (upn) This is PII andshould never be logged. :paramtype upn: str """ super(User, self).__init__(**kwargs) self.user_object_id = user_object_id self.user_pu_id = user_pu_id self.user_idp = user_idp self.user_alt_sec_id = user_alt_sec_id self.user_iss = user_iss self.user_tenant_id = user_tenant_id self.user_name = user_name self.upn = upn class UserAssignedIdentity(msrest.serialization.Model): """UserAssignedIdentity. :ivar principal_id: :vartype principal_id: str :ivar client_id: :vartype client_id: str """ _attribute_map = { 'principal_id': {'key': 'principalId', 'type': 'str'}, 'client_id': {'key': 'clientId', 'type': 'str'}, } def __init__( self, *, principal_id: Optional[str] = None, client_id: Optional[str] = None, **kwargs ): """ :keyword principal_id: :paramtype principal_id: str :keyword client_id: :paramtype client_id: str """ super(UserAssignedIdentity, self).__init__(**kwargs) self.principal_id = principal_id self.client_id = client_id class ValidationDataSettings(msrest.serialization.Model): """ValidationDataSettings. 
:ivar n_cross_validations: :vartype n_cross_validations: ~flow.models.NCrossValidations :ivar validation_data_size: :vartype validation_data_size: float :ivar cv_split_column_names: :vartype cv_split_column_names: list[str] :ivar validation_type: :vartype validation_type: str """ _attribute_map = { 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, 'validation_type': {'key': 'validationType', 'type': 'str'}, } def __init__( self, *, n_cross_validations: Optional["NCrossValidations"] = None, validation_data_size: Optional[float] = None, cv_split_column_names: Optional[List[str]] = None, validation_type: Optional[str] = None, **kwargs ): """ :keyword n_cross_validations: :paramtype n_cross_validations: ~flow.models.NCrossValidations :keyword validation_data_size: :paramtype validation_data_size: float :keyword cv_split_column_names: :paramtype cv_split_column_names: list[str] :keyword validation_type: :paramtype validation_type: str """ super(ValidationDataSettings, self).__init__(**kwargs) self.n_cross_validations = n_cross_validations self.validation_data_size = validation_data_size self.cv_split_column_names = cv_split_column_names self.validation_type = validation_type class VariantNode(msrest.serialization.Model): """VariantNode. :ivar node: :vartype node: ~flow.models.Node :ivar description: :vartype description: str """ _attribute_map = { 'node': {'key': 'node', 'type': 'Node'}, 'description': {'key': 'description', 'type': 'str'}, } def __init__( self, *, node: Optional["Node"] = None, description: Optional[str] = None, **kwargs ): """ :keyword node: :paramtype node: ~flow.models.Node :keyword description: :paramtype description: str """ super(VariantNode, self).__init__(**kwargs) self.node = node self.description = description class Webhook(msrest.serialization.Model): """Webhook. 
:ivar webhook_type: The only acceptable values to pass in are None and "AzureDevOps". The default value is None. :vartype webhook_type: str :ivar event_type: :vartype event_type: str """ _attribute_map = { 'webhook_type': {'key': 'webhookType', 'type': 'str'}, 'event_type': {'key': 'eventType', 'type': 'str'}, } def __init__( self, *, webhook_type: Optional[str] = None, event_type: Optional[str] = None, **kwargs ): """ :keyword webhook_type: The only acceptable values to pass in are None and "AzureDevOps". The default value is None. :paramtype webhook_type: str :keyword event_type: :paramtype event_type: str """ super(Webhook, self).__init__(**kwargs) self.webhook_type = webhook_type self.event_type = event_type class WebServiceComputeMetaInfo(msrest.serialization.Model): """WebServiceComputeMetaInfo. :ivar node_count: :vartype node_count: int :ivar is_ssl_enabled: :vartype is_ssl_enabled: bool :ivar aks_not_found: :vartype aks_not_found: bool :ivar cluster_purpose: :vartype cluster_purpose: str :ivar public_ip_address: :vartype public_ip_address: str :ivar vm_size: :vartype vm_size: str :ivar location: :vartype location: str :ivar provisioning_state: :vartype provisioning_state: str :ivar state: :vartype state: str :ivar os_type: :vartype os_type: str :ivar id: :vartype id: str :ivar name: :vartype name: str :ivar created_by_studio: :vartype created_by_studio: bool :ivar is_gpu_type: :vartype is_gpu_type: bool :ivar resource_id: :vartype resource_id: str :ivar compute_type: :vartype compute_type: str """ _attribute_map = { 'node_count': {'key': 'nodeCount', 'type': 'int'}, 'is_ssl_enabled': {'key': 'isSslEnabled', 'type': 'bool'}, 'aks_not_found': {'key': 'aksNotFound', 'type': 'bool'}, 'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'}, 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'provisioning_state': {'key': 'provisioningState', 'type': 
'str'}, 'state': {'key': 'state', 'type': 'str'}, 'os_type': {'key': 'osType', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'created_by_studio': {'key': 'createdByStudio', 'type': 'bool'}, 'is_gpu_type': {'key': 'isGpuType', 'type': 'bool'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, } def __init__( self, *, node_count: Optional[int] = None, is_ssl_enabled: Optional[bool] = None, aks_not_found: Optional[bool] = None, cluster_purpose: Optional[str] = None, public_ip_address: Optional[str] = None, vm_size: Optional[str] = None, location: Optional[str] = None, provisioning_state: Optional[str] = None, state: Optional[str] = None, os_type: Optional[str] = None, id: Optional[str] = None, name: Optional[str] = None, created_by_studio: Optional[bool] = None, is_gpu_type: Optional[bool] = None, resource_id: Optional[str] = None, compute_type: Optional[str] = None, **kwargs ): """ :keyword node_count: :paramtype node_count: int :keyword is_ssl_enabled: :paramtype is_ssl_enabled: bool :keyword aks_not_found: :paramtype aks_not_found: bool :keyword cluster_purpose: :paramtype cluster_purpose: str :keyword public_ip_address: :paramtype public_ip_address: str :keyword vm_size: :paramtype vm_size: str :keyword location: :paramtype location: str :keyword provisioning_state: :paramtype provisioning_state: str :keyword state: :paramtype state: str :keyword os_type: :paramtype os_type: str :keyword id: :paramtype id: str :keyword name: :paramtype name: str :keyword created_by_studio: :paramtype created_by_studio: bool :keyword is_gpu_type: :paramtype is_gpu_type: bool :keyword resource_id: :paramtype resource_id: str :keyword compute_type: :paramtype compute_type: str """ super(WebServiceComputeMetaInfo, self).__init__(**kwargs) self.node_count = node_count self.is_ssl_enabled = is_ssl_enabled self.aks_not_found = aks_not_found self.cluster_purpose = cluster_purpose 
self.public_ip_address = public_ip_address self.vm_size = vm_size self.location = location self.provisioning_state = provisioning_state self.state = state self.os_type = os_type self.id = id self.name = name self.created_by_studio = created_by_studio self.is_gpu_type = is_gpu_type self.resource_id = resource_id self.compute_type = compute_type class WebServicePort(msrest.serialization.Model): """WebServicePort. :ivar node_id: :vartype node_id: str :ivar port_name: :vartype port_name: str :ivar name: :vartype name: str """ _attribute_map = { 'node_id': {'key': 'nodeId', 'type': 'str'}, 'port_name': {'key': 'portName', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, } def __init__( self, *, node_id: Optional[str] = None, port_name: Optional[str] = None, name: Optional[str] = None, **kwargs ): """ :keyword node_id: :paramtype node_id: str :keyword port_name: :paramtype port_name: str :keyword name: :paramtype name: str """ super(WebServicePort, self).__init__(**kwargs) self.node_id = node_id self.port_name = port_name self.name = name class WorkspaceConnectionSpec(msrest.serialization.Model): """WorkspaceConnectionSpec. 
:ivar connection_category: Possible values include: "PythonFeed", "ACR", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "AzureDataLakeGen2", "Redis", "ApiKey", "AzureOpenAI", "CognitiveSearch", "CognitiveService", "CustomKeys", "AzureBlob", "AzureOneLake", "CosmosDb", "CosmosDbMongoDbApi", "AzureDataExplorer", "AzureMariaDb", "AzureDatabricksDeltaLake", "AzureSqlMi", "AzureTableStorage", "AmazonRdsForOracle", "AmazonRdsForSqlServer", "AmazonRedshift", "Db2", "Drill", "GoogleBigQuery", "Greenplum", "Hbase", "Hive", "Impala", "Informix", "MariaDb", "MicrosoftAccess", "MySql", "Netezza", "Oracle", "Phoenix", "PostgreSql", "Presto", "SapOpenHub", "SapBw", "SapHana", "SapTable", "Spark", "SqlServer", "Sybase", "Teradata", "Vertica", "Cassandra", "Couchbase", "MongoDbV2", "MongoDbAtlas", "AmazonS3Compatible", "FileServer", "FtpServer", "GoogleCloudStorage", "Hdfs", "OracleCloudStorage", "Sftp", "GenericHttp", "ODataRest", "Odbc", "GenericRest", "AmazonMws", "Concur", "Dynamics", "DynamicsAx", "DynamicsCrm", "GoogleAdWords", "Hubspot", "Jira", "Magento", "Marketo", "Office365", "Eloqua", "Responsys", "OracleServiceCloud", "PayPal", "QuickBooks", "Salesforce", "SalesforceServiceCloud", "SalesforceMarketingCloud", "SapCloudForCustomer", "SapEcc", "ServiceNow", "SharePointOnlineList", "Shopify", "Square", "WebTable", "Xero", "Zoho", "GenericContainerRegistry". :vartype connection_category: str or ~flow.models.ConnectionCategory :ivar flow_value_type: Possible values include: "int", "double", "bool", "string", "secret", "prompt_template", "object", "list", "BingConnection", "OpenAIConnection", "AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection", "AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection", "SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection", "function_list", "function_str", "FormRecognizerConnection", "file_path", "image". 
:vartype flow_value_type: str or ~flow.models.ValueType :ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing", "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM", "Pinecone", "Qdrant", "Weaviate", "FormRecognizer". :vartype connection_type: str or ~flow.models.ConnectionType :ivar connection_type_display_name: :vartype connection_type_display_name: str :ivar config_specs: :vartype config_specs: list[~flow.models.ConnectionConfigSpec] :ivar module: :vartype module: str """ _attribute_map = { 'connection_category': {'key': 'connectionCategory', 'type': 'str'}, 'flow_value_type': {'key': 'flowValueType', 'type': 'str'}, 'connection_type': {'key': 'connectionType', 'type': 'str'}, 'connection_type_display_name': {'key': 'connectionTypeDisplayName', 'type': 'str'}, 'config_specs': {'key': 'configSpecs', 'type': '[ConnectionConfigSpec]'}, 'module': {'key': 'module', 'type': 'str'}, } def __init__( self, *, connection_category: Optional[Union[str, "ConnectionCategory"]] = None, flow_value_type: Optional[Union[str, "ValueType"]] = None, connection_type: Optional[Union[str, "ConnectionType"]] = None, connection_type_display_name: Optional[str] = None, config_specs: Optional[List["ConnectionConfigSpec"]] = None, module: Optional[str] = None, **kwargs ): """ :keyword connection_category: Possible values include: "PythonFeed", "ACR", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "AzureDataLakeGen2", "Redis", "ApiKey", "AzureOpenAI", "CognitiveSearch", "CognitiveService", "CustomKeys", "AzureBlob", "AzureOneLake", "CosmosDb", "CosmosDbMongoDbApi", "AzureDataExplorer", "AzureMariaDb", "AzureDatabricksDeltaLake", "AzureSqlMi", "AzureTableStorage", "AmazonRdsForOracle", "AmazonRdsForSqlServer", "AmazonRedshift", "Db2", "Drill", "GoogleBigQuery", "Greenplum", "Hbase", "Hive", "Impala", "Informix", "MariaDb", "MicrosoftAccess", "MySql", "Netezza", "Oracle", 
"Phoenix", "PostgreSql", "Presto", "SapOpenHub", "SapBw", "SapHana", "SapTable", "Spark", "SqlServer", "Sybase", "Teradata", "Vertica", "Cassandra", "Couchbase", "MongoDbV2", "MongoDbAtlas", "AmazonS3Compatible", "FileServer", "FtpServer", "GoogleCloudStorage", "Hdfs", "OracleCloudStorage", "Sftp", "GenericHttp", "ODataRest", "Odbc", "GenericRest", "AmazonMws", "Concur", "Dynamics", "DynamicsAx", "DynamicsCrm", "GoogleAdWords", "Hubspot", "Jira", "Magento", "Marketo", "Office365", "Eloqua", "Responsys", "OracleServiceCloud", "PayPal", "QuickBooks", "Salesforce", "SalesforceServiceCloud", "SalesforceMarketingCloud", "SapCloudForCustomer", "SapEcc", "ServiceNow", "SharePointOnlineList", "Shopify", "Square", "WebTable", "Xero", "Zoho", "GenericContainerRegistry". :paramtype connection_category: str or ~flow.models.ConnectionCategory :keyword flow_value_type: Possible values include: "int", "double", "bool", "string", "secret", "prompt_template", "object", "list", "BingConnection", "OpenAIConnection", "AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection", "AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection", "SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection", "function_list", "function_str", "FormRecognizerConnection", "file_path", "image". :paramtype flow_value_type: str or ~flow.models.ValueType :keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing", "AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM", "Pinecone", "Qdrant", "Weaviate", "FormRecognizer". 
:paramtype connection_type: str or ~flow.models.ConnectionType :keyword connection_type_display_name: :paramtype connection_type_display_name: str :keyword config_specs: :paramtype config_specs: list[~flow.models.ConnectionConfigSpec] :keyword module: :paramtype module: str """ super(WorkspaceConnectionSpec, self).__init__(**kwargs) self.connection_category = connection_category self.flow_value_type = flow_value_type self.connection_type = connection_type self.connection_type_display_name = connection_type_display_name self.config_specs = config_specs self.module = module
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/models/__init__.py
# coding=utf-8 # -------------------------------------------------------------------------- # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected]) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- try: from ._models_py3 import ACIAdvanceSettings from ._models_py3 import AEVAComputeConfiguration from ._models_py3 import AEVAResourceConfiguration from ._models_py3 import AISuperComputerConfiguration from ._models_py3 import AISuperComputerScalePolicy from ._models_py3 import AISuperComputerStorageReferenceConfiguration from ._models_py3 import AKSAdvanceSettings from ._models_py3 import AKSReplicaStatus from ._models_py3 import AMLComputeConfiguration from ._models_py3 import APCloudConfiguration from ._models_py3 import Activate from ._models_py3 import AdditionalErrorInfo from ._models_py3 import AdhocTriggerScheduledCommandJobRequest from ._models_py3 import AdhocTriggerScheduledSparkJobRequest from ._models_py3 import AetherAPCloudConfiguration from ._models_py3 import AetherAmlDataset from ._models_py3 import AetherAmlSparkCloudSetting from ._models_py3 import AetherArgumentAssignment from ._models_py3 import AetherAssetDefinition from ._models_py3 import AetherAssetOutputSettings from ._models_py3 import AetherAutoFeaturizeConfiguration from ._models_py3 import AetherAutoMLComponentConfiguration from ._models_py3 import AetherAutoTrainConfiguration from ._models_py3 import AetherAzureBlobReference from ._models_py3 import AetherAzureDataLakeGen2Reference from ._models_py3 import AetherAzureDataLakeReference from ._models_py3 import AetherAzureDatabaseReference from ._models_py3 import AetherAzureFilesReference from ._models_py3 import AetherBatchAiComputeInfo from ._models_py3 import AetherBuildArtifactInfo from ._models_py3 import AetherCloudBuildDropPathInfo from ._models_py3 import 
AetherCloudBuildInfo from ._models_py3 import AetherCloudBuildQueueInfo from ._models_py3 import AetherCloudPrioritySetting from ._models_py3 import AetherCloudSettings from ._models_py3 import AetherColumnTransformer from ._models_py3 import AetherComputeConfiguration from ._models_py3 import AetherComputeSetting from ._models_py3 import AetherControlInput from ._models_py3 import AetherControlOutput from ._models_py3 import AetherCopyDataTask from ._models_py3 import AetherCosmosReference from ._models_py3 import AetherCreatedBy from ._models_py3 import AetherCustomReference from ._models_py3 import AetherDBFSReference from ._models_py3 import AetherDataLocation from ._models_py3 import AetherDataLocationReuseCalculationFields from ._models_py3 import AetherDataPath from ._models_py3 import AetherDataReference from ._models_py3 import AetherDataSetDefinition from ._models_py3 import AetherDataSetDefinitionValue from ._models_py3 import AetherDataSettings from ._models_py3 import AetherDataTransferCloudConfiguration from ._models_py3 import AetherDataTransferSink from ._models_py3 import AetherDataTransferSource from ._models_py3 import AetherDataTransferV2CloudSetting from ._models_py3 import AetherDatabaseSink from ._models_py3 import AetherDatabaseSource from ._models_py3 import AetherDatabricksComputeInfo from ._models_py3 import AetherDatasetOutput from ._models_py3 import AetherDatasetOutputOptions from ._models_py3 import AetherDatasetRegistration from ._models_py3 import AetherDatastoreSetting from ._models_py3 import AetherDoWhileControlFlowInfo from ._models_py3 import AetherDoWhileControlFlowRunSettings from ._models_py3 import AetherDockerSettingConfiguration from ._models_py3 import AetherEntityInterfaceDocumentation from ._models_py3 import AetherEntrySetting from ._models_py3 import AetherEnvironmentConfiguration from ._models_py3 import AetherEsCloudConfiguration from ._models_py3 import AetherExportDataTask from ._models_py3 import 
AetherFeaturizationSettings from ._models_py3 import AetherFileSystem from ._models_py3 import AetherForecastHorizon from ._models_py3 import AetherForecastingSettings from ._models_py3 import AetherGeneralSettings from ._models_py3 import AetherGlobsOptions from ._models_py3 import AetherGraphControlNode from ._models_py3 import AetherGraphControlReferenceNode from ._models_py3 import AetherGraphDatasetNode from ._models_py3 import AetherGraphEdge from ._models_py3 import AetherGraphEntity from ._models_py3 import AetherGraphModuleNode from ._models_py3 import AetherGraphReferenceNode from ._models_py3 import AetherHdfsReference from ._models_py3 import AetherHdiClusterComputeInfo from ._models_py3 import AetherHdiRunConfiguration from ._models_py3 import AetherHyperDriveConfiguration from ._models_py3 import AetherIdentitySetting from ._models_py3 import AetherImportDataTask from ._models_py3 import AetherInputSetting from ._models_py3 import AetherInteractiveConfig from ._models_py3 import AetherK8SConfiguration from ._models_py3 import AetherLegacyDataPath from ._models_py3 import AetherLimitSettings from ._models_py3 import AetherMlcComputeInfo from ._models_py3 import AetherModuleEntity from ._models_py3 import AetherModuleExtendedProperties from ._models_py3 import AetherNCrossValidations from ._models_py3 import AetherOutputSetting from ._models_py3 import AetherParallelForControlFlowInfo from ._models_py3 import AetherParameterAssignment from ._models_py3 import AetherPhillyHdfsReference from ._models_py3 import AetherPortInfo from ._models_py3 import AetherPriorityConfig from ._models_py3 import AetherPriorityConfiguration from ._models_py3 import AetherRegisteredDataSetReference from ._models_py3 import AetherRemoteDockerComputeInfo from ._models_py3 import AetherResourceAssignment from ._models_py3 import AetherResourceAttributeAssignment from ._models_py3 import AetherResourceAttributeDefinition from ._models_py3 import AetherResourceConfig from 
._models_py3 import AetherResourceConfiguration from ._models_py3 import AetherResourceModel from ._models_py3 import AetherResourcesSetting from ._models_py3 import AetherSavedDataSetReference from ._models_py3 import AetherScopeCloudConfiguration from ._models_py3 import AetherSeasonality from ._models_py3 import AetherSqlDataPath from ._models_py3 import AetherStackEnsembleSettings from ._models_py3 import AetherStoredProcedureParameter from ._models_py3 import AetherStructuredInterface from ._models_py3 import AetherStructuredInterfaceInput from ._models_py3 import AetherStructuredInterfaceOutput from ._models_py3 import AetherStructuredInterfaceParameter from ._models_py3 import AetherSubGraphConfiguration from ._models_py3 import AetherSweepEarlyTerminationPolicy from ._models_py3 import AetherSweepSettings from ._models_py3 import AetherSweepSettingsLimits from ._models_py3 import AetherTargetLags from ._models_py3 import AetherTargetRollingWindowSize from ._models_py3 import AetherTargetSelectorConfiguration from ._models_py3 import AetherTestDataSettings from ._models_py3 import AetherTorchDistributedConfiguration from ._models_py3 import AetherTrainingOutput from ._models_py3 import AetherTrainingSettings from ._models_py3 import AetherUIAzureOpenAIDeploymentNameSelector from ._models_py3 import AetherUIAzureOpenAIModelCapabilities from ._models_py3 import AetherUIColumnPicker from ._models_py3 import AetherUIJsonEditor from ._models_py3 import AetherUIParameterHint from ._models_py3 import AetherUIPromptFlowConnectionSelector from ._models_py3 import AetherValidationDataSettings from ._models_py3 import AetherVsoBuildArtifactInfo from ._models_py3 import AetherVsoBuildDefinitionInfo from ._models_py3 import AetherVsoBuildInfo from ._models_py3 import AmlDataset from ._models_py3 import AmlK8SConfiguration from ._models_py3 import AmlK8SPriorityConfiguration from ._models_py3 import AmlSparkCloudSetting from ._models_py3 import ApiAndParameters from 
._models_py3 import ApplicationEndpointConfiguration from ._models_py3 import ArgumentAssignment from ._models_py3 import Asset from ._models_py3 import AssetDefinition from ._models_py3 import AssetNameAndVersionIdentifier from ._models_py3 import AssetOutputSettings from ._models_py3 import AssetOutputSettingsParameter from ._models_py3 import AssetPublishResult from ._models_py3 import AssetPublishSingleRegionResult from ._models_py3 import AssetTypeMetaInfo from ._models_py3 import AssetVersionPublishRequest from ._models_py3 import AssignedUser from ._models_py3 import AuthKeys from ._models_py3 import AutoClusterComputeSpecification from ._models_py3 import AutoDeleteSetting from ._models_py3 import AutoFeaturizeConfiguration from ._models_py3 import AutoMLComponentConfiguration from ._models_py3 import AutoScaler from ._models_py3 import AutoTrainConfiguration from ._models_py3 import AutologgerSettings from ._models_py3 import AvailabilityResponse from ._models_py3 import AzureBlobReference from ._models_py3 import AzureDataLakeGen2Reference from ._models_py3 import AzureDataLakeReference from ._models_py3 import AzureDatabaseReference from ._models_py3 import AzureFilesReference from ._models_py3 import AzureMLModuleVersionDescriptor from ._models_py3 import AzureOpenAIDeploymentDto from ._models_py3 import AzureOpenAIModelCapabilities from ._models_py3 import BatchAiComputeInfo from ._models_py3 import BatchDataInput from ._models_py3 import BatchExportComponentSpecResponse from ._models_py3 import BatchExportRawComponentResponse from ._models_py3 import BatchGetComponentHashesRequest from ._models_py3 import BatchGetComponentRequest from ._models_py3 import Binding from ._models_py3 import BulkTestDto from ._models_py3 import CloudError from ._models_py3 import CloudPrioritySetting from ._models_py3 import CloudSettings from ._models_py3 import ColumnTransformer from ._models_py3 import CommandJob from ._models_py3 import CommandJobLimits from 
._models_py3 import CommandReturnCodeConfig from ._models_py3 import ComponentConfiguration from ._models_py3 import ComponentInput from ._models_py3 import ComponentJob from ._models_py3 import ComponentJobInput from ._models_py3 import ComponentJobOutput from ._models_py3 import ComponentNameAndDefaultVersion from ._models_py3 import ComponentNameMetaInfo from ._models_py3 import ComponentOutput from ._models_py3 import ComponentPreflightResult from ._models_py3 import ComponentSpecMetaInfo from ._models_py3 import ComponentUpdateRequest from ._models_py3 import ComponentValidationRequest from ._models_py3 import ComponentValidationResponse from ._models_py3 import Compute from ._models_py3 import ComputeConfiguration from ._models_py3 import ComputeContract from ._models_py3 import ComputeIdentityContract from ._models_py3 import ComputeIdentityDto from ._models_py3 import ComputeInfo from ._models_py3 import ComputeProperties from ._models_py3 import ComputeRPUserAssignedIdentity from ._models_py3 import ComputeRequest from ._models_py3 import ComputeSetting from ._models_py3 import ComputeStatus from ._models_py3 import ComputeStatusDetail from ._models_py3 import ComputeWarning from ._models_py3 import ConnectionConfigSpec from ._models_py3 import ConnectionDto from ._models_py3 import ConnectionEntity from ._models_py3 import ConnectionOverrideSetting from ._models_py3 import ConnectionSpec from ._models_py3 import ContainerInstanceConfiguration from ._models_py3 import ContainerRegistry from ._models_py3 import ContainerResourceRequirements from ._models_py3 import ControlInput from ._models_py3 import ControlOutput from ._models_py3 import CopyDataTask from ._models_py3 import CreateFlowFromSampleRequest from ._models_py3 import CreateFlowRequest from ._models_py3 import CreateFlowRuntimeRequest from ._models_py3 import CreateFlowSessionRequest from ._models_py3 import CreateInferencePipelineRequest from ._models_py3 import CreateOrUpdateConnectionRequest 
from ._models_py3 import CreateOrUpdateConnectionRequestDto from ._models_py3 import CreatePipelineDraftRequest from ._models_py3 import CreatePipelineJobScheduleDto from ._models_py3 import CreatePublishedPipelineRequest from ._models_py3 import CreateRealTimeEndpointRequest from ._models_py3 import CreatedBy from ._models_py3 import CreatedFromDto from ._models_py3 import CreationContext from ._models_py3 import Cron from ._models_py3 import CustomConnectionConfig from ._models_py3 import CustomReference from ._models_py3 import DBFSReference from ._models_py3 import Data from ._models_py3 import DataInfo from ._models_py3 import DataLocation from ._models_py3 import DataPath from ._models_py3 import DataPathParameter from ._models_py3 import DataPortDto from ._models_py3 import DataReference from ._models_py3 import DataReferenceConfiguration from ._models_py3 import DataSetDefinition from ._models_py3 import DataSetDefinitionValue from ._models_py3 import DataSetPathParameter from ._models_py3 import DataSettings from ._models_py3 import DataTransferCloudConfiguration from ._models_py3 import DataTransferSink from ._models_py3 import DataTransferSource from ._models_py3 import DataTransferV2CloudSetting from ._models_py3 import DataTypeCreationInfo from ._models_py3 import DatabaseSink from ._models_py3 import DatabaseSource from ._models_py3 import DatabricksComputeInfo from ._models_py3 import DatabricksConfiguration from ._models_py3 import DatacacheConfiguration from ._models_py3 import DatasetIdentifier from ._models_py3 import DatasetInputDetails from ._models_py3 import DatasetLineage from ._models_py3 import DatasetOutput from ._models_py3 import DatasetOutputDetails from ._models_py3 import DatasetOutputOptions from ._models_py3 import DatasetRegistration from ._models_py3 import DatasetRegistrationOptions from ._models_py3 import DatastoreSetting from ._models_py3 import DbfsStorageInfoDto from ._models_py3 import DebugInfoResponse from ._models_py3 
import DeployFlowRequest from ._models_py3 import DeploymentInfo from ._models_py3 import DistributionConfiguration from ._models_py3 import DistributionParameter from ._models_py3 import DoWhileControlFlowInfo from ._models_py3 import DoWhileControlFlowRunSettings from ._models_py3 import DockerBuildContext from ._models_py3 import DockerConfiguration from ._models_py3 import DockerImagePlatform from ._models_py3 import DockerSection from ._models_py3 import DockerSettingConfiguration from ._models_py3 import DownloadResourceInfo from ._models_py3 import EPRPipelineRunErrorClassificationRequest from ._models_py3 import EndpointSetting from ._models_py3 import EntityInterface from ._models_py3 import EntrySetting from ._models_py3 import EnumParameterRule from ._models_py3 import EnvironmentConfiguration from ._models_py3 import EnvironmentDefinition from ._models_py3 import EnvironmentDefinitionDto from ._models_py3 import ErrorAdditionalInfo from ._models_py3 import ErrorResponse from ._models_py3 import EsCloudConfiguration from ._models_py3 import EvaluationFlowRunSettings from ._models_py3 import ExampleRequest from ._models_py3 import ExecutionContextDto from ._models_py3 import ExecutionDataLocation from ._models_py3 import ExecutionDataPath from ._models_py3 import ExecutionGlobsOptions from ._models_py3 import ExperimentComputeMetaInfo from ._models_py3 import ExperimentInfo from ._models_py3 import ExportComponentMetaInfo from ._models_py3 import ExportDataTask from ._models_py3 import FeaturizationSettings from ._models_py3 import FeedDto from ._models_py3 import FeedDtoSupportedAssetTypes from ._models_py3 import FileSystem from ._models_py3 import Flow from ._models_py3 import FlowAnnotations from ._models_py3 import FlowBaseDto from ._models_py3 import FlowDto from ._models_py3 import FlowEnvironment from ._models_py3 import FlowFeature from ._models_py3 import FlowFeatureState from ._models_py3 import FlowGraph from ._models_py3 import 
FlowGraphAnnotationNode from ._models_py3 import FlowGraphLayout from ._models_py3 import FlowGraphReference from ._models_py3 import FlowIndexEntity from ._models_py3 import FlowInputDefinition from ._models_py3 import FlowNode from ._models_py3 import FlowNodeLayout from ._models_py3 import FlowNodeVariant from ._models_py3 import FlowOutputDefinition from ._models_py3 import FlowProperties from ._models_py3 import FlowRunBasePath from ._models_py3 import FlowRunInfo from ._models_py3 import FlowRunResult from ._models_py3 import FlowRunSettings from ._models_py3 import FlowRuntimeCapability from ._models_py3 import FlowRuntimeDto from ._models_py3 import FlowSampleDto from ._models_py3 import FlowSessionDto from ._models_py3 import FlowSnapshot from ._models_py3 import FlowSubmitRunSettings from ._models_py3 import FlowTestInfo from ._models_py3 import FlowTestStorageSetting from ._models_py3 import FlowToolSettingParameter from ._models_py3 import FlowToolsDto from ._models_py3 import FlowVariantNode from ._models_py3 import ForecastHorizon from ._models_py3 import ForecastingSettings from ._models_py3 import GeneralSettings from ._models_py3 import GeneratePipelineComponentRequest from ._models_py3 import GenerateToolMetaRequest from ._models_py3 import GetDynamicListRequest from ._models_py3 import GetRunDataResultDto from ._models_py3 import GetTrainingSessionDto from ._models_py3 import GlobalJobDispatcherConfiguration from ._models_py3 import GlobsOptions from ._models_py3 import GraphAnnotationNode from ._models_py3 import GraphControlNode from ._models_py3 import GraphControlReferenceNode from ._models_py3 import GraphDatasetNode from ._models_py3 import GraphDraftEntity from ._models_py3 import GraphEdge from ._models_py3 import GraphLayout from ._models_py3 import GraphLayoutCreationInfo from ._models_py3 import GraphModuleNode from ._models_py3 import GraphModuleNodeRunSetting from ._models_py3 import GraphModuleNodeUIInputSetting from ._models_py3 
import GraphNodeStatusInfo from ._models_py3 import GraphReferenceNode from ._models_py3 import HdfsReference from ._models_py3 import HdiClusterComputeInfo from ._models_py3 import HdiConfiguration from ._models_py3 import HdiRunConfiguration from ._models_py3 import HistoryConfiguration from ._models_py3 import HyperDriveConfiguration from ._models_py3 import ICheckableLongRunningOperationResponse from ._models_py3 import IdentityConfiguration from ._models_py3 import IdentitySetting from ._models_py3 import ImportDataTask from ._models_py3 import IndexedErrorResponse from ._models_py3 import InitScriptInfoDto from ._models_py3 import InnerErrorDetails from ._models_py3 import InnerErrorResponse from ._models_py3 import InputAsset from ._models_py3 import InputData from ._models_py3 import InputDataBinding from ._models_py3 import InputDefinition from ._models_py3 import InputOutputPortMetadata from ._models_py3 import InputSetting from ._models_py3 import IntellectualPropertyPublisherInformation from ._models_py3 import InteractiveConfig from ._models_py3 import InteractiveConfiguration from ._models_py3 import JobCost from ._models_py3 import JobEndpoint from ._models_py3 import JobInput from ._models_py3 import JobOutput from ._models_py3 import JobOutputArtifacts from ._models_py3 import JobScheduleDto from ._models_py3 import K8SConfiguration from ._models_py3 import KeyValuePairComponentNameMetaInfoErrorResponse from ._models_py3 import KeyValuePairComponentNameMetaInfoModuleDto from ._models_py3 import KeyValuePairStringObject from ._models_py3 import KubernetesConfiguration from ._models_py3 import Kwarg from ._models_py3 import LegacyDataPath from ._models_py3 import LimitSettings from ._models_py3 import LinkedADBWorkspaceMetadata from ._models_py3 import LinkedPipelineInfo from ._models_py3 import LoadFlowAsComponentRequest from ._models_py3 import LogRunTerminatedEventDto from ._models_py3 import LongRunningOperationUriResponse from ._models_py3 
import LongRunningUpdateRegistryComponentRequest from ._models_py3 import ManagedServiceIdentity from ._models_py3 import MavenLibraryDto from ._models_py3 import MetricProperties from ._models_py3 import MetricSchemaDto from ._models_py3 import MetricSchemaPropertyDto from ._models_py3 import MetricV2Dto from ._models_py3 import MetricV2Value from ._models_py3 import MfeInternalAutologgerSettings from ._models_py3 import MfeInternalIdentityConfiguration from ._models_py3 import MfeInternalNodes from ._models_py3 import MfeInternalOutputData from ._models_py3 import MfeInternalSecretConfiguration from ._models_py3 import MfeInternalUriReference from ._models_py3 import MfeInternalV20211001ComponentJob from ._models_py3 import MinMaxParameterRule from ._models_py3 import MlcComputeInfo from ._models_py3 import ModelDto from ._models_py3 import ModelManagementErrorResponse from ._models_py3 import ModifyPipelineJobScheduleDto from ._models_py3 import ModuleDto from ._models_py3 import ModuleDtoWithErrors from ._models_py3 import ModuleDtoWithValidateStatus from ._models_py3 import ModuleEntity from ._models_py3 import ModulePythonInterface from ._models_py3 import MpiConfiguration from ._models_py3 import NCrossValidations from ._models_py3 import Node from ._models_py3 import NodeInputPort from ._models_py3 import NodeLayout from ._models_py3 import NodeOutputPort from ._models_py3 import NodePortInterface from ._models_py3 import NodeSource from ._models_py3 import NodeTelemetryMetaInfo from ._models_py3 import NodeVariant from ._models_py3 import Nodes from ._models_py3 import NoteBookTaskDto from ._models_py3 import NotificationSetting from ._models_py3 import ODataError from ._models_py3 import ODataErrorDetail from ._models_py3 import ODataErrorResponse from ._models_py3 import ODataInnerError from ._models_py3 import OutputData from ._models_py3 import OutputDataBinding from ._models_py3 import OutputDatasetLineage from ._models_py3 import OutputDefinition 
from ._models_py3 import OutputOptions from ._models_py3 import OutputSetting from ._models_py3 import OutputSettingSpec from ._models_py3 import PaginatedDataInfoList from ._models_py3 import PaginatedModelDtoList from ._models_py3 import PaginatedModuleDtoList from ._models_py3 import PaginatedPipelineDraftSummaryList from ._models_py3 import PaginatedPipelineEndpointSummaryList from ._models_py3 import PaginatedPipelineRunSummaryList from ._models_py3 import PaginatedPublishedPipelineSummaryList from ._models_py3 import ParallelForControlFlowInfo from ._models_py3 import ParallelTaskConfiguration from ._models_py3 import Parameter from ._models_py3 import ParameterAssignment from ._models_py3 import ParameterDefinition from ._models_py3 import PatchFlowRequest from ._models_py3 import Pipeline from ._models_py3 import PipelineDraft from ._models_py3 import PipelineDraftStepDetails from ._models_py3 import PipelineDraftSummary from ._models_py3 import PipelineEndpoint from ._models_py3 import PipelineEndpointSummary from ._models_py3 import PipelineGraph from ._models_py3 import PipelineInput from ._models_py3 import PipelineJob from ._models_py3 import PipelineJobRuntimeBasicSettings from ._models_py3 import PipelineJobScheduleDto from ._models_py3 import PipelineOutput from ._models_py3 import PipelineRun from ._models_py3 import PipelineRunGraphDetail from ._models_py3 import PipelineRunGraphStatus from ._models_py3 import PipelineRunProfile from ._models_py3 import PipelineRunStatus from ._models_py3 import PipelineRunStepDetails from ._models_py3 import PipelineRunSummary from ._models_py3 import PipelineStatus from ._models_py3 import PipelineStepRun from ._models_py3 import PipelineStepRunOutputs from ._models_py3 import PipelineSubDraft from ._models_py3 import PolicyValidationResponse from ._models_py3 import PortInfo from ._models_py3 import PortOutputInfo from ._models_py3 import PriorityConfig from ._models_py3 import PriorityConfiguration from 
._models_py3 import PromoteDataSetRequest from ._models_py3 import ProviderEntity from ._models_py3 import PublishedPipeline from ._models_py3 import PublishedPipelineSummary from ._models_py3 import PyTorchConfiguration from ._models_py3 import PythonInterfaceMapping from ._models_py3 import PythonPyPiOrRCranLibraryDto from ._models_py3 import PythonSection from ._models_py3 import QueueingInfo from ._models_py3 import RCranPackage from ._models_py3 import RGitHubPackage from ._models_py3 import RSection from ._models_py3 import RawComponentDto from ._models_py3 import RayConfiguration from ._models_py3 import RealTimeEndpoint from ._models_py3 import RealTimeEndpointInfo from ._models_py3 import RealTimeEndpointStatus from ._models_py3 import RealTimeEndpointSummary from ._models_py3 import RealTimeEndpointTestRequest from ._models_py3 import Recurrence from ._models_py3 import RecurrencePattern from ._models_py3 import RecurrenceSchedule from ._models_py3 import RegenerateServiceKeysRequest from ._models_py3 import RegisterComponentMetaInfo from ._models_py3 import RegisterComponentMetaInfoExtraHashes from ._models_py3 import RegisterComponentMetaInfoIdentifierHashes from ._models_py3 import RegisterRegistryComponentMetaInfo from ._models_py3 import RegisterRegistryComponentMetaInfoExtraHashes from ._models_py3 import RegisterRegistryComponentMetaInfoIdentifierHashes from ._models_py3 import RegisteredDataSetReference from ._models_py3 import RegistrationOptions from ._models_py3 import RegistryBlobReferenceData from ._models_py3 import RegistryIdentity from ._models_py3 import Relationship from ._models_py3 import RemoteDockerComputeInfo from ._models_py3 import ResourceConfig from ._models_py3 import ResourceConfiguration from ._models_py3 import ResourcesSetting from ._models_py3 import RetrieveToolFuncResultRequest from ._models_py3 import RetryConfiguration from ._models_py3 import RootError from ._models_py3 import RunAnnotations from ._models_py3 import 
RunConfiguration from ._models_py3 import RunDatasetReference from ._models_py3 import RunDefinition from ._models_py3 import RunDetailsDto from ._models_py3 import RunDetailsWarningDto from ._models_py3 import RunDto from ._models_py3 import RunIndexEntity from ._models_py3 import RunIndexMetricSummary from ._models_py3 import RunIndexMetricSummarySystemObject from ._models_py3 import RunIndexResourceMetricSummary from ._models_py3 import RunMetricDto from ._models_py3 import RunMetricsTypesDto from ._models_py3 import RunProperties from ._models_py3 import RunSettingParameter from ._models_py3 import RunSettingParameterAssignment from ._models_py3 import RunSettingUIParameterHint from ._models_py3 import RunStatusPeriod from ._models_py3 import RunTypeV2 from ._models_py3 import RunTypeV2Index from ._models_py3 import RuntimeConfiguration from ._models_py3 import SampleMeta from ._models_py3 import SavePipelineDraftRequest from ._models_py3 import SavedDataSetReference from ._models_py3 import ScheduleBase from ._models_py3 import SchemaContractsCreatedBy from ._models_py3 import ScopeCloudConfiguration from ._models_py3 import Seasonality from ._models_py3 import SecretConfiguration from ._models_py3 import SegmentedResult1 from ._models_py3 import ServiceLogRequest from ._models_py3 import SessionApplication from ._models_py3 import SessionApplicationRunCommandResult from ._models_py3 import SessionProperties from ._models_py3 import SetupFlowSessionRequest from ._models_py3 import SharingScope from ._models_py3 import Snapshot from ._models_py3 import SnapshotInfo from ._models_py3 import SourceCodeDataReference from ._models_py3 import SparkConfiguration from ._models_py3 import SparkJarTaskDto from ._models_py3 import SparkJob from ._models_py3 import SparkJobEntry from ._models_py3 import SparkMavenPackage from ._models_py3 import SparkPythonTaskDto from ._models_py3 import SparkResourceConfiguration from ._models_py3 import SparkSection from ._models_py3 
import SparkSubmitTaskDto from ._models_py3 import SqlDataPath from ._models_py3 import StackEnsembleSettings from ._models_py3 import StandbyPoolProperties from ._models_py3 import StandbyPoolResourceStatus from ._models_py3 import StartRunResult from ._models_py3 import StepRunProfile from ._models_py3 import StorageInfo from ._models_py3 import StoredProcedureParameter from ._models_py3 import Stream from ._models_py3 import StructuredInterface from ._models_py3 import StructuredInterfaceInput from ._models_py3 import StructuredInterfaceOutput from ._models_py3 import StructuredInterfaceParameter from ._models_py3 import StudioMigrationInfo from ._models_py3 import SubGraphConcatenateAssignment from ._models_py3 import SubGraphConfiguration from ._models_py3 import SubGraphConnectionInfo from ._models_py3 import SubGraphDataPathParameterAssignment from ._models_py3 import SubGraphInfo from ._models_py3 import SubGraphParameterAssignment from ._models_py3 import SubGraphPortInfo from ._models_py3 import SubPipelineDefinition from ._models_py3 import SubPipelineParameterAssignment from ._models_py3 import SubPipelinesInfo from ._models_py3 import SubStatusPeriod from ._models_py3 import SubmitBulkRunRequest from ._models_py3 import SubmitBulkRunResponse from ._models_py3 import SubmitFlowRequest from ._models_py3 import SubmitPipelineRunRequest from ._models_py3 import SweepEarlyTerminationPolicy from ._models_py3 import SweepSettings from ._models_py3 import SweepSettingsLimits from ._models_py3 import SystemData from ._models_py3 import SystemMeta from ._models_py3 import SystemMetaExtraHashes from ._models_py3 import SystemMetaIdentifierHashes from ._models_py3 import TargetLags from ._models_py3 import TargetRollingWindowSize from ._models_py3 import TargetSelectorConfiguration from ._models_py3 import Task from ._models_py3 import TaskControlFlowInfo from ._models_py3 import TaskReuseInfo from ._models_py3 import TensorflowConfiguration from ._models_py3 
import TestDataSettings from ._models_py3 import Tool from ._models_py3 import ToolFuncResponse from ._models_py3 import ToolInputDynamicList from ._models_py3 import ToolInputGeneratedBy from ._models_py3 import ToolMetaDto from ._models_py3 import ToolSetting from ._models_py3 import ToolSourceMeta from ._models_py3 import TorchDistributedConfiguration from ._models_py3 import TrainingDiagnosticConfiguration from ._models_py3 import TrainingOutput from ._models_py3 import TrainingSettings from ._models_py3 import TriggerAsyncOperationStatus from ._models_py3 import TuningNodeSetting from ._models_py3 import TypedAssetReference from ._models_py3 import UIAzureOpenAIDeploymentNameSelector from ._models_py3 import UIAzureOpenAIModelCapabilities from ._models_py3 import UIColumnPicker from ._models_py3 import UIComputeSelection from ._models_py3 import UIHyperparameterConfiguration from ._models_py3 import UIInputSetting from ._models_py3 import UIJsonEditor from ._models_py3 import UIParameterHint from ._models_py3 import UIPromptFlowConnectionSelector from ._models_py3 import UIWidgetMetaInfo from ._models_py3 import UIYamlEditor from ._models_py3 import UnversionedEntityRequestDto from ._models_py3 import UnversionedEntityResponseDto from ._models_py3 import UnversionedRebuildIndexDto from ._models_py3 import UnversionedRebuildResponseDto from ._models_py3 import UpdateComponentRequest from ._models_py3 import UpdateFlowRequest from ._models_py3 import UpdateFlowRuntimeRequest from ._models_py3 import UpdateRegistryComponentRequest from ._models_py3 import UploadOptions from ._models_py3 import UriReference from ._models_py3 import User from ._models_py3 import UserAssignedIdentity from ._models_py3 import ValidationDataSettings from ._models_py3 import VariantNode from ._models_py3 import WebServiceComputeMetaInfo from ._models_py3 import WebServicePort from ._models_py3 import Webhook from ._models_py3 import WorkspaceConnectionSpec except (SyntaxError, 
ImportError): from ._models import ACIAdvanceSettings # type: ignore from ._models import AEVAComputeConfiguration # type: ignore from ._models import AEVAResourceConfiguration # type: ignore from ._models import AISuperComputerConfiguration # type: ignore from ._models import AISuperComputerScalePolicy # type: ignore from ._models import AISuperComputerStorageReferenceConfiguration # type: ignore from ._models import AKSAdvanceSettings # type: ignore from ._models import AKSReplicaStatus # type: ignore from ._models import AMLComputeConfiguration # type: ignore from ._models import APCloudConfiguration # type: ignore from ._models import Activate # type: ignore from ._models import AdditionalErrorInfo # type: ignore from ._models import AdhocTriggerScheduledCommandJobRequest # type: ignore from ._models import AdhocTriggerScheduledSparkJobRequest # type: ignore from ._models import AetherAPCloudConfiguration # type: ignore from ._models import AetherAmlDataset # type: ignore from ._models import AetherAmlSparkCloudSetting # type: ignore from ._models import AetherArgumentAssignment # type: ignore from ._models import AetherAssetDefinition # type: ignore from ._models import AetherAssetOutputSettings # type: ignore from ._models import AetherAutoFeaturizeConfiguration # type: ignore from ._models import AetherAutoMLComponentConfiguration # type: ignore from ._models import AetherAutoTrainConfiguration # type: ignore from ._models import AetherAzureBlobReference # type: ignore from ._models import AetherAzureDataLakeGen2Reference # type: ignore from ._models import AetherAzureDataLakeReference # type: ignore from ._models import AetherAzureDatabaseReference # type: ignore from ._models import AetherAzureFilesReference # type: ignore from ._models import AetherBatchAiComputeInfo # type: ignore from ._models import AetherBuildArtifactInfo # type: ignore from ._models import AetherCloudBuildDropPathInfo # type: ignore from ._models import AetherCloudBuildInfo # type: 
ignore from ._models import AetherCloudBuildQueueInfo # type: ignore from ._models import AetherCloudPrioritySetting # type: ignore from ._models import AetherCloudSettings # type: ignore from ._models import AetherColumnTransformer # type: ignore from ._models import AetherComputeConfiguration # type: ignore from ._models import AetherComputeSetting # type: ignore from ._models import AetherControlInput # type: ignore from ._models import AetherControlOutput # type: ignore from ._models import AetherCopyDataTask # type: ignore from ._models import AetherCosmosReference # type: ignore from ._models import AetherCreatedBy # type: ignore from ._models import AetherCustomReference # type: ignore from ._models import AetherDBFSReference # type: ignore from ._models import AetherDataLocation # type: ignore from ._models import AetherDataLocationReuseCalculationFields # type: ignore from ._models import AetherDataPath # type: ignore from ._models import AetherDataReference # type: ignore from ._models import AetherDataSetDefinition # type: ignore from ._models import AetherDataSetDefinitionValue # type: ignore from ._models import AetherDataSettings # type: ignore from ._models import AetherDataTransferCloudConfiguration # type: ignore from ._models import AetherDataTransferSink # type: ignore from ._models import AetherDataTransferSource # type: ignore from ._models import AetherDataTransferV2CloudSetting # type: ignore from ._models import AetherDatabaseSink # type: ignore from ._models import AetherDatabaseSource # type: ignore from ._models import AetherDatabricksComputeInfo # type: ignore from ._models import AetherDatasetOutput # type: ignore from ._models import AetherDatasetOutputOptions # type: ignore from ._models import AetherDatasetRegistration # type: ignore from ._models import AetherDatastoreSetting # type: ignore from ._models import AetherDoWhileControlFlowInfo # type: ignore from ._models import AetherDoWhileControlFlowRunSettings # type: ignore from 
._models import AetherDockerSettingConfiguration # type: ignore from ._models import AetherEntityInterfaceDocumentation # type: ignore from ._models import AetherEntrySetting # type: ignore from ._models import AetherEnvironmentConfiguration # type: ignore from ._models import AetherEsCloudConfiguration # type: ignore from ._models import AetherExportDataTask # type: ignore from ._models import AetherFeaturizationSettings # type: ignore from ._models import AetherFileSystem # type: ignore from ._models import AetherForecastHorizon # type: ignore from ._models import AetherForecastingSettings # type: ignore from ._models import AetherGeneralSettings # type: ignore from ._models import AetherGlobsOptions # type: ignore from ._models import AetherGraphControlNode # type: ignore from ._models import AetherGraphControlReferenceNode # type: ignore from ._models import AetherGraphDatasetNode # type: ignore from ._models import AetherGraphEdge # type: ignore from ._models import AetherGraphEntity # type: ignore from ._models import AetherGraphModuleNode # type: ignore from ._models import AetherGraphReferenceNode # type: ignore from ._models import AetherHdfsReference # type: ignore from ._models import AetherHdiClusterComputeInfo # type: ignore from ._models import AetherHdiRunConfiguration # type: ignore from ._models import AetherHyperDriveConfiguration # type: ignore from ._models import AetherIdentitySetting # type: ignore from ._models import AetherImportDataTask # type: ignore from ._models import AetherInputSetting # type: ignore from ._models import AetherInteractiveConfig # type: ignore from ._models import AetherK8SConfiguration # type: ignore from ._models import AetherLegacyDataPath # type: ignore from ._models import AetherLimitSettings # type: ignore from ._models import AetherMlcComputeInfo # type: ignore from ._models import AetherModuleEntity # type: ignore from ._models import AetherModuleExtendedProperties # type: ignore from ._models import 
AetherNCrossValidations # type: ignore from ._models import AetherOutputSetting # type: ignore from ._models import AetherParallelForControlFlowInfo # type: ignore from ._models import AetherParameterAssignment # type: ignore from ._models import AetherPhillyHdfsReference # type: ignore from ._models import AetherPortInfo # type: ignore from ._models import AetherPriorityConfig # type: ignore from ._models import AetherPriorityConfiguration # type: ignore from ._models import AetherRegisteredDataSetReference # type: ignore from ._models import AetherRemoteDockerComputeInfo # type: ignore from ._models import AetherResourceAssignment # type: ignore from ._models import AetherResourceAttributeAssignment # type: ignore from ._models import AetherResourceAttributeDefinition # type: ignore from ._models import AetherResourceConfig # type: ignore from ._models import AetherResourceConfiguration # type: ignore from ._models import AetherResourceModel # type: ignore from ._models import AetherResourcesSetting # type: ignore from ._models import AetherSavedDataSetReference # type: ignore from ._models import AetherScopeCloudConfiguration # type: ignore from ._models import AetherSeasonality # type: ignore from ._models import AetherSqlDataPath # type: ignore from ._models import AetherStackEnsembleSettings # type: ignore from ._models import AetherStoredProcedureParameter # type: ignore from ._models import AetherStructuredInterface # type: ignore from ._models import AetherStructuredInterfaceInput # type: ignore from ._models import AetherStructuredInterfaceOutput # type: ignore from ._models import AetherStructuredInterfaceParameter # type: ignore from ._models import AetherSubGraphConfiguration # type: ignore from ._models import AetherSweepEarlyTerminationPolicy # type: ignore from ._models import AetherSweepSettings # type: ignore from ._models import AetherSweepSettingsLimits # type: ignore from ._models import AetherTargetLags # type: ignore from ._models import 
AetherTargetRollingWindowSize # type: ignore from ._models import AetherTargetSelectorConfiguration # type: ignore from ._models import AetherTestDataSettings # type: ignore from ._models import AetherTorchDistributedConfiguration # type: ignore from ._models import AetherTrainingOutput # type: ignore from ._models import AetherTrainingSettings # type: ignore from ._models import AetherUIAzureOpenAIDeploymentNameSelector # type: ignore from ._models import AetherUIAzureOpenAIModelCapabilities # type: ignore from ._models import AetherUIColumnPicker # type: ignore from ._models import AetherUIJsonEditor # type: ignore from ._models import AetherUIParameterHint # type: ignore from ._models import AetherUIPromptFlowConnectionSelector # type: ignore from ._models import AetherValidationDataSettings # type: ignore from ._models import AetherVsoBuildArtifactInfo # type: ignore from ._models import AetherVsoBuildDefinitionInfo # type: ignore from ._models import AetherVsoBuildInfo # type: ignore from ._models import AmlDataset # type: ignore from ._models import AmlK8SConfiguration # type: ignore from ._models import AmlK8SPriorityConfiguration # type: ignore from ._models import AmlSparkCloudSetting # type: ignore from ._models import ApiAndParameters # type: ignore from ._models import ApplicationEndpointConfiguration # type: ignore from ._models import ArgumentAssignment # type: ignore from ._models import Asset # type: ignore from ._models import AssetDefinition # type: ignore from ._models import AssetNameAndVersionIdentifier # type: ignore from ._models import AssetOutputSettings # type: ignore from ._models import AssetOutputSettingsParameter # type: ignore from ._models import AssetPublishResult # type: ignore from ._models import AssetPublishSingleRegionResult # type: ignore from ._models import AssetTypeMetaInfo # type: ignore from ._models import AssetVersionPublishRequest # type: ignore from ._models import AssignedUser # type: ignore from ._models import 
AuthKeys # type: ignore from ._models import AutoClusterComputeSpecification # type: ignore from ._models import AutoDeleteSetting # type: ignore from ._models import AutoFeaturizeConfiguration # type: ignore from ._models import AutoMLComponentConfiguration # type: ignore from ._models import AutoScaler # type: ignore from ._models import AutoTrainConfiguration # type: ignore from ._models import AutologgerSettings # type: ignore from ._models import AvailabilityResponse # type: ignore from ._models import AzureBlobReference # type: ignore from ._models import AzureDataLakeGen2Reference # type: ignore from ._models import AzureDataLakeReference # type: ignore from ._models import AzureDatabaseReference # type: ignore from ._models import AzureFilesReference # type: ignore from ._models import AzureMLModuleVersionDescriptor # type: ignore from ._models import AzureOpenAIDeploymentDto # type: ignore from ._models import AzureOpenAIModelCapabilities # type: ignore from ._models import BatchAiComputeInfo # type: ignore from ._models import BatchDataInput # type: ignore from ._models import BatchExportComponentSpecResponse # type: ignore from ._models import BatchExportRawComponentResponse # type: ignore from ._models import BatchGetComponentHashesRequest # type: ignore from ._models import BatchGetComponentRequest # type: ignore from ._models import Binding # type: ignore from ._models import BulkTestDto # type: ignore from ._models import CloudError # type: ignore from ._models import CloudPrioritySetting # type: ignore from ._models import CloudSettings # type: ignore from ._models import ColumnTransformer # type: ignore from ._models import CommandJob # type: ignore from ._models import CommandJobLimits # type: ignore from ._models import CommandReturnCodeConfig # type: ignore from ._models import ComponentConfiguration # type: ignore from ._models import ComponentInput # type: ignore from ._models import ComponentJob # type: ignore from ._models import 
ComponentJobInput # type: ignore from ._models import ComponentJobOutput # type: ignore from ._models import ComponentNameAndDefaultVersion # type: ignore from ._models import ComponentNameMetaInfo # type: ignore from ._models import ComponentOutput # type: ignore from ._models import ComponentPreflightResult # type: ignore from ._models import ComponentSpecMetaInfo # type: ignore from ._models import ComponentUpdateRequest # type: ignore from ._models import ComponentValidationRequest # type: ignore from ._models import ComponentValidationResponse # type: ignore from ._models import Compute # type: ignore from ._models import ComputeConfiguration # type: ignore from ._models import ComputeContract # type: ignore from ._models import ComputeIdentityContract # type: ignore from ._models import ComputeIdentityDto # type: ignore from ._models import ComputeInfo # type: ignore from ._models import ComputeProperties # type: ignore from ._models import ComputeRPUserAssignedIdentity # type: ignore from ._models import ComputeRequest # type: ignore from ._models import ComputeSetting # type: ignore from ._models import ComputeStatus # type: ignore from ._models import ComputeStatusDetail # type: ignore from ._models import ComputeWarning # type: ignore from ._models import ConnectionConfigSpec # type: ignore from ._models import ConnectionDto # type: ignore from ._models import ConnectionEntity # type: ignore from ._models import ConnectionOverrideSetting # type: ignore from ._models import ConnectionSpec # type: ignore from ._models import ContainerInstanceConfiguration # type: ignore from ._models import ContainerRegistry # type: ignore from ._models import ContainerResourceRequirements # type: ignore from ._models import ControlInput # type: ignore from ._models import ControlOutput # type: ignore from ._models import CopyDataTask # type: ignore from ._models import CreateFlowFromSampleRequest # type: ignore from ._models import CreateFlowRequest # type: ignore from 
._models import CreateFlowRuntimeRequest # type: ignore from ._models import CreateFlowSessionRequest # type: ignore from ._models import CreateInferencePipelineRequest # type: ignore from ._models import CreateOrUpdateConnectionRequest # type: ignore from ._models import CreateOrUpdateConnectionRequestDto # type: ignore from ._models import CreatePipelineDraftRequest # type: ignore from ._models import CreatePipelineJobScheduleDto # type: ignore from ._models import CreatePublishedPipelineRequest # type: ignore from ._models import CreateRealTimeEndpointRequest # type: ignore from ._models import CreatedBy # type: ignore from ._models import CreatedFromDto # type: ignore from ._models import CreationContext # type: ignore from ._models import Cron # type: ignore from ._models import CustomConnectionConfig # type: ignore from ._models import CustomReference # type: ignore from ._models import DBFSReference # type: ignore from ._models import Data # type: ignore from ._models import DataInfo # type: ignore from ._models import DataLocation # type: ignore from ._models import DataPath # type: ignore from ._models import DataPathParameter # type: ignore from ._models import DataPortDto # type: ignore from ._models import DataReference # type: ignore from ._models import DataReferenceConfiguration # type: ignore from ._models import DataSetDefinition # type: ignore from ._models import DataSetDefinitionValue # type: ignore from ._models import DataSetPathParameter # type: ignore from ._models import DataSettings # type: ignore from ._models import DataTransferCloudConfiguration # type: ignore from ._models import DataTransferSink # type: ignore from ._models import DataTransferSource # type: ignore from ._models import DataTransferV2CloudSetting # type: ignore from ._models import DataTypeCreationInfo # type: ignore from ._models import DatabaseSink # type: ignore from ._models import DatabaseSource # type: ignore from ._models import DatabricksComputeInfo # type: 
ignore from ._models import DatabricksConfiguration # type: ignore from ._models import DatacacheConfiguration # type: ignore from ._models import DatasetIdentifier # type: ignore from ._models import DatasetInputDetails # type: ignore from ._models import DatasetLineage # type: ignore from ._models import DatasetOutput # type: ignore from ._models import DatasetOutputDetails # type: ignore from ._models import DatasetOutputOptions # type: ignore from ._models import DatasetRegistration # type: ignore from ._models import DatasetRegistrationOptions # type: ignore from ._models import DatastoreSetting # type: ignore from ._models import DbfsStorageInfoDto # type: ignore from ._models import DebugInfoResponse # type: ignore from ._models import DeployFlowRequest # type: ignore from ._models import DeploymentInfo # type: ignore from ._models import DistributionConfiguration # type: ignore from ._models import DistributionParameter # type: ignore from ._models import DoWhileControlFlowInfo # type: ignore from ._models import DoWhileControlFlowRunSettings # type: ignore from ._models import DockerBuildContext # type: ignore from ._models import DockerConfiguration # type: ignore from ._models import DockerImagePlatform # type: ignore from ._models import DockerSection # type: ignore from ._models import DockerSettingConfiguration # type: ignore from ._models import DownloadResourceInfo # type: ignore from ._models import EPRPipelineRunErrorClassificationRequest # type: ignore from ._models import EndpointSetting # type: ignore from ._models import EntityInterface # type: ignore from ._models import EntrySetting # type: ignore from ._models import EnumParameterRule # type: ignore from ._models import EnvironmentConfiguration # type: ignore from ._models import EnvironmentDefinition # type: ignore from ._models import EnvironmentDefinitionDto # type: ignore from ._models import ErrorAdditionalInfo # type: ignore from ._models import ErrorResponse # type: ignore from 
._models import EsCloudConfiguration # type: ignore from ._models import EvaluationFlowRunSettings # type: ignore from ._models import ExampleRequest # type: ignore from ._models import ExecutionContextDto # type: ignore from ._models import ExecutionDataLocation # type: ignore from ._models import ExecutionDataPath # type: ignore from ._models import ExecutionGlobsOptions # type: ignore from ._models import ExperimentComputeMetaInfo # type: ignore from ._models import ExperimentInfo # type: ignore from ._models import ExportComponentMetaInfo # type: ignore from ._models import ExportDataTask # type: ignore from ._models import FeaturizationSettings # type: ignore from ._models import FeedDto # type: ignore from ._models import FeedDtoSupportedAssetTypes # type: ignore from ._models import FileSystem # type: ignore from ._models import Flow # type: ignore from ._models import FlowAnnotations # type: ignore from ._models import FlowBaseDto # type: ignore from ._models import FlowDto # type: ignore from ._models import FlowEnvironment # type: ignore from ._models import FlowFeature # type: ignore from ._models import FlowFeatureState # type: ignore from ._models import FlowGraph # type: ignore from ._models import FlowGraphAnnotationNode # type: ignore from ._models import FlowGraphLayout # type: ignore from ._models import FlowGraphReference # type: ignore from ._models import FlowIndexEntity # type: ignore from ._models import FlowInputDefinition # type: ignore from ._models import FlowNode # type: ignore from ._models import FlowNodeLayout # type: ignore from ._models import FlowNodeVariant # type: ignore from ._models import FlowOutputDefinition # type: ignore from ._models import FlowProperties # type: ignore from ._models import FlowRunBasePath # type: ignore from ._models import FlowRunInfo # type: ignore from ._models import FlowRunResult # type: ignore from ._models import FlowRunSettings # type: ignore from ._models import FlowRuntimeCapability # type: 
ignore from ._models import FlowRuntimeDto # type: ignore from ._models import FlowSampleDto # type: ignore from ._models import FlowSessionDto # type: ignore from ._models import FlowSnapshot # type: ignore from ._models import FlowSubmitRunSettings # type: ignore from ._models import FlowTestInfo # type: ignore from ._models import FlowTestStorageSetting # type: ignore from ._models import FlowToolSettingParameter # type: ignore from ._models import FlowToolsDto # type: ignore from ._models import FlowVariantNode # type: ignore from ._models import ForecastHorizon # type: ignore from ._models import ForecastingSettings # type: ignore from ._models import GeneralSettings # type: ignore from ._models import GeneratePipelineComponentRequest # type: ignore from ._models import GenerateToolMetaRequest # type: ignore from ._models import GetDynamicListRequest # type: ignore from ._models import GetRunDataResultDto # type: ignore from ._models import GetTrainingSessionDto # type: ignore from ._models import GlobalJobDispatcherConfiguration # type: ignore from ._models import GlobsOptions # type: ignore from ._models import GraphAnnotationNode # type: ignore from ._models import GraphControlNode # type: ignore from ._models import GraphControlReferenceNode # type: ignore from ._models import GraphDatasetNode # type: ignore from ._models import GraphDraftEntity # type: ignore from ._models import GraphEdge # type: ignore from ._models import GraphLayout # type: ignore from ._models import GraphLayoutCreationInfo # type: ignore from ._models import GraphModuleNode # type: ignore from ._models import GraphModuleNodeRunSetting # type: ignore from ._models import GraphModuleNodeUIInputSetting # type: ignore from ._models import GraphNodeStatusInfo # type: ignore from ._models import GraphReferenceNode # type: ignore from ._models import HdfsReference # type: ignore from ._models import HdiClusterComputeInfo # type: ignore from ._models import HdiConfiguration # type: ignore 
from ._models import HdiRunConfiguration # type: ignore from ._models import HistoryConfiguration # type: ignore from ._models import HyperDriveConfiguration # type: ignore from ._models import ICheckableLongRunningOperationResponse # type: ignore from ._models import IdentityConfiguration # type: ignore from ._models import IdentitySetting # type: ignore from ._models import ImportDataTask # type: ignore from ._models import IndexedErrorResponse # type: ignore from ._models import InitScriptInfoDto # type: ignore from ._models import InnerErrorDetails # type: ignore from ._models import InnerErrorResponse # type: ignore from ._models import InputAsset # type: ignore from ._models import InputData # type: ignore from ._models import InputDataBinding # type: ignore from ._models import InputDefinition # type: ignore from ._models import InputOutputPortMetadata # type: ignore from ._models import InputSetting # type: ignore from ._models import IntellectualPropertyPublisherInformation # type: ignore from ._models import InteractiveConfig # type: ignore from ._models import InteractiveConfiguration # type: ignore from ._models import JobCost # type: ignore from ._models import JobEndpoint # type: ignore from ._models import JobInput # type: ignore from ._models import JobOutput # type: ignore from ._models import JobOutputArtifacts # type: ignore from ._models import JobScheduleDto # type: ignore from ._models import K8SConfiguration # type: ignore from ._models import KeyValuePairComponentNameMetaInfoErrorResponse # type: ignore from ._models import KeyValuePairComponentNameMetaInfoModuleDto # type: ignore from ._models import KeyValuePairStringObject # type: ignore from ._models import KubernetesConfiguration # type: ignore from ._models import Kwarg # type: ignore from ._models import LegacyDataPath # type: ignore from ._models import LimitSettings # type: ignore from ._models import LinkedADBWorkspaceMetadata # type: ignore from ._models import LinkedPipelineInfo 
# type: ignore from ._models import LoadFlowAsComponentRequest # type: ignore from ._models import LogRunTerminatedEventDto # type: ignore from ._models import LongRunningOperationUriResponse # type: ignore from ._models import LongRunningUpdateRegistryComponentRequest # type: ignore from ._models import ManagedServiceIdentity # type: ignore from ._models import MavenLibraryDto # type: ignore from ._models import MetricProperties # type: ignore from ._models import MetricSchemaDto # type: ignore from ._models import MetricSchemaPropertyDto # type: ignore from ._models import MetricV2Dto # type: ignore from ._models import MetricV2Value # type: ignore from ._models import MfeInternalAutologgerSettings # type: ignore from ._models import MfeInternalIdentityConfiguration # type: ignore from ._models import MfeInternalNodes # type: ignore from ._models import MfeInternalOutputData # type: ignore from ._models import MfeInternalSecretConfiguration # type: ignore from ._models import MfeInternalUriReference # type: ignore from ._models import MfeInternalV20211001ComponentJob # type: ignore from ._models import MinMaxParameterRule # type: ignore from ._models import MlcComputeInfo # type: ignore from ._models import ModelDto # type: ignore from ._models import ModelManagementErrorResponse # type: ignore from ._models import ModifyPipelineJobScheduleDto # type: ignore from ._models import ModuleDto # type: ignore from ._models import ModuleDtoWithErrors # type: ignore from ._models import ModuleDtoWithValidateStatus # type: ignore from ._models import ModuleEntity # type: ignore from ._models import ModulePythonInterface # type: ignore from ._models import MpiConfiguration # type: ignore from ._models import NCrossValidations # type: ignore from ._models import Node # type: ignore from ._models import NodeInputPort # type: ignore from ._models import NodeLayout # type: ignore from ._models import NodeOutputPort # type: ignore from ._models import NodePortInterface # type: 
ignore from ._models import NodeSource # type: ignore from ._models import NodeTelemetryMetaInfo # type: ignore from ._models import NodeVariant # type: ignore from ._models import Nodes # type: ignore from ._models import NoteBookTaskDto # type: ignore from ._models import NotificationSetting # type: ignore from ._models import ODataError # type: ignore from ._models import ODataErrorDetail # type: ignore from ._models import ODataErrorResponse # type: ignore from ._models import ODataInnerError # type: ignore from ._models import OutputData # type: ignore from ._models import OutputDataBinding # type: ignore from ._models import OutputDatasetLineage # type: ignore from ._models import OutputDefinition # type: ignore from ._models import OutputOptions # type: ignore from ._models import OutputSetting # type: ignore from ._models import OutputSettingSpec # type: ignore from ._models import PaginatedDataInfoList # type: ignore from ._models import PaginatedModelDtoList # type: ignore from ._models import PaginatedModuleDtoList # type: ignore from ._models import PaginatedPipelineDraftSummaryList # type: ignore from ._models import PaginatedPipelineEndpointSummaryList # type: ignore from ._models import PaginatedPipelineRunSummaryList # type: ignore from ._models import PaginatedPublishedPipelineSummaryList # type: ignore from ._models import ParallelForControlFlowInfo # type: ignore from ._models import ParallelTaskConfiguration # type: ignore from ._models import Parameter # type: ignore from ._models import ParameterAssignment # type: ignore from ._models import ParameterDefinition # type: ignore from ._models import PatchFlowRequest # type: ignore from ._models import Pipeline # type: ignore from ._models import PipelineDraft # type: ignore from ._models import PipelineDraftStepDetails # type: ignore from ._models import PipelineDraftSummary # type: ignore from ._models import PipelineEndpoint # type: ignore from ._models import PipelineEndpointSummary # type: 
ignore from ._models import PipelineGraph # type: ignore from ._models import PipelineInput # type: ignore from ._models import PipelineJob # type: ignore from ._models import PipelineJobRuntimeBasicSettings # type: ignore from ._models import PipelineJobScheduleDto # type: ignore from ._models import PipelineOutput # type: ignore from ._models import PipelineRun # type: ignore from ._models import PipelineRunGraphDetail # type: ignore from ._models import PipelineRunGraphStatus # type: ignore from ._models import PipelineRunProfile # type: ignore from ._models import PipelineRunStatus # type: ignore from ._models import PipelineRunStepDetails # type: ignore from ._models import PipelineRunSummary # type: ignore from ._models import PipelineStatus # type: ignore from ._models import PipelineStepRun # type: ignore from ._models import PipelineStepRunOutputs # type: ignore from ._models import PipelineSubDraft # type: ignore from ._models import PolicyValidationResponse # type: ignore from ._models import PortInfo # type: ignore from ._models import PortOutputInfo # type: ignore from ._models import PriorityConfig # type: ignore from ._models import PriorityConfiguration # type: ignore from ._models import PromoteDataSetRequest # type: ignore from ._models import ProviderEntity # type: ignore from ._models import PublishedPipeline # type: ignore from ._models import PublishedPipelineSummary # type: ignore from ._models import PyTorchConfiguration # type: ignore from ._models import PythonInterfaceMapping # type: ignore from ._models import PythonPyPiOrRCranLibraryDto # type: ignore from ._models import PythonSection # type: ignore from ._models import QueueingInfo # type: ignore from ._models import RCranPackage # type: ignore from ._models import RGitHubPackage # type: ignore from ._models import RSection # type: ignore from ._models import RawComponentDto # type: ignore from ._models import RayConfiguration # type: ignore from ._models import RealTimeEndpoint # 
type: ignore from ._models import RealTimeEndpointInfo # type: ignore from ._models import RealTimeEndpointStatus # type: ignore from ._models import RealTimeEndpointSummary # type: ignore from ._models import RealTimeEndpointTestRequest # type: ignore from ._models import Recurrence # type: ignore from ._models import RecurrencePattern # type: ignore from ._models import RecurrenceSchedule # type: ignore from ._models import RegenerateServiceKeysRequest # type: ignore from ._models import RegisterComponentMetaInfo # type: ignore from ._models import RegisterComponentMetaInfoExtraHashes # type: ignore from ._models import RegisterComponentMetaInfoIdentifierHashes # type: ignore from ._models import RegisterRegistryComponentMetaInfo # type: ignore from ._models import RegisterRegistryComponentMetaInfoExtraHashes # type: ignore from ._models import RegisterRegistryComponentMetaInfoIdentifierHashes # type: ignore from ._models import RegisteredDataSetReference # type: ignore from ._models import RegistrationOptions # type: ignore from ._models import RegistryBlobReferenceData # type: ignore from ._models import RegistryIdentity # type: ignore from ._models import Relationship # type: ignore from ._models import RemoteDockerComputeInfo # type: ignore from ._models import ResourceConfig # type: ignore from ._models import ResourceConfiguration # type: ignore from ._models import ResourcesSetting # type: ignore from ._models import RetrieveToolFuncResultRequest # type: ignore from ._models import RetryConfiguration # type: ignore from ._models import RootError # type: ignore from ._models import RunAnnotations # type: ignore from ._models import RunConfiguration # type: ignore from ._models import RunDatasetReference # type: ignore from ._models import RunDefinition # type: ignore from ._models import RunDetailsDto # type: ignore from ._models import RunDetailsWarningDto # type: ignore from ._models import RunDto # type: ignore from ._models import RunIndexEntity # type: 
ignore from ._models import RunIndexMetricSummary # type: ignore from ._models import RunIndexMetricSummarySystemObject # type: ignore from ._models import RunIndexResourceMetricSummary # type: ignore from ._models import RunMetricDto # type: ignore from ._models import RunMetricsTypesDto # type: ignore from ._models import RunProperties # type: ignore from ._models import RunSettingParameter # type: ignore from ._models import RunSettingParameterAssignment # type: ignore from ._models import RunSettingUIParameterHint # type: ignore from ._models import RunStatusPeriod # type: ignore from ._models import RunTypeV2 # type: ignore from ._models import RunTypeV2Index # type: ignore from ._models import RuntimeConfiguration # type: ignore from ._models import SampleMeta # type: ignore from ._models import SavePipelineDraftRequest # type: ignore from ._models import SavedDataSetReference # type: ignore from ._models import ScheduleBase # type: ignore from ._models import SchemaContractsCreatedBy # type: ignore from ._models import ScopeCloudConfiguration # type: ignore from ._models import Seasonality # type: ignore from ._models import SecretConfiguration # type: ignore from ._models import SegmentedResult1 # type: ignore from ._models import ServiceLogRequest # type: ignore from ._models import SessionApplication # type: ignore from ._models import SessionApplicationRunCommandResult # type: ignore from ._models import SessionProperties # type: ignore from ._models import SetupFlowSessionRequest # type: ignore from ._models import SharingScope # type: ignore from ._models import Snapshot # type: ignore from ._models import SnapshotInfo # type: ignore from ._models import SourceCodeDataReference # type: ignore from ._models import SparkConfiguration # type: ignore from ._models import SparkJarTaskDto # type: ignore from ._models import SparkJob # type: ignore from ._models import SparkJobEntry # type: ignore from ._models import SparkMavenPackage # type: ignore from 
._models import SparkPythonTaskDto # type: ignore from ._models import SparkResourceConfiguration # type: ignore from ._models import SparkSection # type: ignore from ._models import SparkSubmitTaskDto # type: ignore from ._models import SqlDataPath # type: ignore from ._models import StackEnsembleSettings # type: ignore from ._models import StandbyPoolProperties # type: ignore from ._models import StandbyPoolResourceStatus # type: ignore from ._models import StartRunResult # type: ignore from ._models import StepRunProfile # type: ignore from ._models import StorageInfo # type: ignore from ._models import StoredProcedureParameter # type: ignore from ._models import Stream # type: ignore from ._models import StructuredInterface # type: ignore from ._models import StructuredInterfaceInput # type: ignore from ._models import StructuredInterfaceOutput # type: ignore from ._models import StructuredInterfaceParameter # type: ignore from ._models import StudioMigrationInfo # type: ignore from ._models import SubGraphConcatenateAssignment # type: ignore from ._models import SubGraphConfiguration # type: ignore from ._models import SubGraphConnectionInfo # type: ignore from ._models import SubGraphDataPathParameterAssignment # type: ignore from ._models import SubGraphInfo # type: ignore from ._models import SubGraphParameterAssignment # type: ignore from ._models import SubGraphPortInfo # type: ignore from ._models import SubPipelineDefinition # type: ignore from ._models import SubPipelineParameterAssignment # type: ignore from ._models import SubPipelinesInfo # type: ignore from ._models import SubStatusPeriod # type: ignore from ._models import SubmitBulkRunRequest # type: ignore from ._models import SubmitBulkRunResponse # type: ignore from ._models import SubmitFlowRequest # type: ignore from ._models import SubmitPipelineRunRequest # type: ignore from ._models import SweepEarlyTerminationPolicy # type: ignore from ._models import SweepSettings # type: ignore from 
._models import SweepSettingsLimits # type: ignore from ._models import SystemData # type: ignore from ._models import SystemMeta # type: ignore from ._models import SystemMetaExtraHashes # type: ignore from ._models import SystemMetaIdentifierHashes # type: ignore from ._models import TargetLags # type: ignore from ._models import TargetRollingWindowSize # type: ignore from ._models import TargetSelectorConfiguration # type: ignore from ._models import Task # type: ignore from ._models import TaskControlFlowInfo # type: ignore from ._models import TaskReuseInfo # type: ignore from ._models import TensorflowConfiguration # type: ignore from ._models import TestDataSettings # type: ignore from ._models import Tool # type: ignore from ._models import ToolFuncResponse # type: ignore from ._models import ToolInputDynamicList # type: ignore from ._models import ToolInputGeneratedBy # type: ignore from ._models import ToolMetaDto # type: ignore from ._models import ToolSetting # type: ignore from ._models import ToolSourceMeta # type: ignore from ._models import TorchDistributedConfiguration # type: ignore from ._models import TrainingDiagnosticConfiguration # type: ignore from ._models import TrainingOutput # type: ignore from ._models import TrainingSettings # type: ignore from ._models import TriggerAsyncOperationStatus # type: ignore from ._models import TuningNodeSetting # type: ignore from ._models import TypedAssetReference # type: ignore from ._models import UIAzureOpenAIDeploymentNameSelector # type: ignore from ._models import UIAzureOpenAIModelCapabilities # type: ignore from ._models import UIColumnPicker # type: ignore from ._models import UIComputeSelection # type: ignore from ._models import UIHyperparameterConfiguration # type: ignore from ._models import UIInputSetting # type: ignore from ._models import UIJsonEditor # type: ignore from ._models import UIParameterHint # type: ignore from ._models import UIPromptFlowConnectionSelector # type: ignore from 
._models import UIWidgetMetaInfo # type: ignore from ._models import UIYamlEditor # type: ignore from ._models import UnversionedEntityRequestDto # type: ignore from ._models import UnversionedEntityResponseDto # type: ignore from ._models import UnversionedRebuildIndexDto # type: ignore from ._models import UnversionedRebuildResponseDto # type: ignore from ._models import UpdateComponentRequest # type: ignore from ._models import UpdateFlowRequest # type: ignore from ._models import UpdateFlowRuntimeRequest # type: ignore from ._models import UpdateRegistryComponentRequest # type: ignore from ._models import UploadOptions # type: ignore from ._models import UriReference # type: ignore from ._models import User # type: ignore from ._models import UserAssignedIdentity # type: ignore from ._models import ValidationDataSettings # type: ignore from ._models import VariantNode # type: ignore from ._models import WebServiceComputeMetaInfo # type: ignore from ._models import WebServicePort # type: ignore from ._models import Webhook # type: ignore from ._models import WorkspaceConnectionSpec # type: ignore from ._azure_machine_learning_designer_service_client_enums import ( AEVAAssetType, AEVADataStoreMode, AEVAIdentityType, ActionType, AetherArgumentValueType, AetherAssetType, AetherBuildSourceType, AetherComputeType, AetherControlFlowType, AetherControlInputValue, AetherDataCopyMode, AetherDataLocationStorageType, AetherDataReferenceType, AetherDataStoreMode, AetherDataTransferStorageType, AetherDataTransferTaskType, AetherDatasetType, AetherEarlyTerminationPolicyType, AetherEntityStatus, AetherExecutionEnvironment, AetherExecutionPhase, AetherFeaturizationMode, AetherFileBasedPathType, AetherForecastHorizonMode, AetherIdentityType, AetherLogVerbosity, AetherModuleDeploymentSource, AetherModuleHashVersion, AetherModuleType, AetherNCrossValidationMode, AetherParameterType, AetherParameterValueType, AetherPrimaryMetrics, AetherRepositoryType, AetherResourceOperator, 
AetherResourceValueType, AetherSamplingAlgorithmType, AetherSeasonalityMode, AetherShortSeriesHandlingConfiguration, AetherStackMetaLearnerType, AetherStoredProcedureParameterType, AetherTabularTrainingMode, AetherTargetAggregationFunction, AetherTargetLagsMode, AetherTargetRollingWindowSizeMode, AetherTaskType, AetherTrainingOutputType, AetherUIScriptLanguageEnum, AetherUIWidgetTypeEnum, AetherUploadState, AetherUseStl, ApplicationEndpointType, ArgumentValueType, AssetScopeTypes, AssetSourceType, AssetType, AutoDeleteCondition, BuildContextLocationType, Communicator, ComponentRegistrationTypeEnum, ComponentType, ComputeEnvironmentType, ComputeTargetType, ComputeType, ConfigValueType, ConnectionCategory, ConnectionScope, ConnectionSourceType, ConnectionType, ConsumeMode, ControlFlowType, ControlInputValue, DataBindingMode, DataCategory, DataCopyMode, DataLocationStorageType, DataPortType, DataReferenceType, DataSourceType, DataStoreMode, DataTransferStorageType, DataTransferTaskType, DataTypeMechanism, DatasetAccessModes, DatasetConsumptionType, DatasetDeliveryMechanism, DatasetOutputType, DatasetType, DeliveryMechanism, DistributionParameterEnum, DistributionType, EarlyTerminationPolicyType, EmailNotificationEnableType, EndpointAuthMode, EntityKind, EntityStatus, ErrorHandlingMode, ExecutionPhase, FeaturizationMode, FlowFeatureStateEnum, FlowLanguage, FlowPatchOperationType, FlowRunMode, FlowRunTypeEnum, FlowRuntimeSubmissionApiVersion, FlowTestMode, FlowType, ForecastHorizonMode, Framework, Frequency, GlobalJobDispatcherSupportedComputeType, GraphComponentsMode, GraphDatasetsLoadModes, GraphSdkCodeType, HttpStatusCode, IdentityType, InputType, IntellectualPropertyAccessMode, JobInputType, JobLimitsType, JobOutputType, JobProvisioningState, JobStatus, JobType, KeyType, ListViewType, LogLevel, LogVerbosity, LongRunningUpdateType, MLFlowAutologgerState, ManagedServiceIdentityType, MetricValueType, MfeInternalIdentityType, MfeInternalMLFlowAutologgerState, 
MfeInternalScheduleStatus, ModuleDtoFields, ModuleInfoFromYamlStatusEnum, ModuleRunSettingTypes, ModuleScope, ModuleSourceType, ModuleType, ModuleUpdateOperationType, ModuleWorkingMechanism, NCrossValidationMode, NodeCompositionMode, NodesValueType, Orientation, OutputMechanism, ParameterType, ParameterValueType, PipelineDraftMode, PipelineRunStatusCode, PipelineStatusCode, PipelineType, PortAction, PrimaryMetrics, ProvisioningState, RealTimeEndpointInternalStepCode, RealTimeEndpointOpCode, RealTimeEndpointOpStatusCode, RecurrenceFrequency, RunDisplayNameGenerationType, RunSettingParameterType, RunSettingUIWidgetTypeEnum, RunStatus, RunType, RuntimeStatusEnum, RuntimeType, SamplingAlgorithmType, ScheduleProvisioningStatus, ScheduleStatus, ScheduleType, ScopeType, ScriptType, SeasonalityMode, Section, SessionSetupModeEnum, SetupFlowSessionAction, SeverityLevel, ShortSeriesHandlingConfiguration, StackMetaLearnerType, StorageAuthType, StoredProcedureParameterType, SuccessfulCommandReturnCode, TabularTrainingMode, TargetAggregationFunction, TargetLagsMode, TargetRollingWindowSizeMode, TaskCreationOptions, TaskStatus, TaskStatusCode, TaskType, ToolFuncCallScenario, ToolState, ToolType, TrainingOutputType, TriggerOperationType, TriggerType, UIInputDataDeliveryMode, UIScriptLanguageEnum, UIWidgetTypeEnum, UploadState, UseStl, UserType, ValidationStatus, ValueType, VmPriority, WebServiceState, WeekDays, Weekday, YarnDeployMode, ) __all__ = [ 'ACIAdvanceSettings', 'AEVAComputeConfiguration', 'AEVAResourceConfiguration', 'AISuperComputerConfiguration', 'AISuperComputerScalePolicy', 'AISuperComputerStorageReferenceConfiguration', 'AKSAdvanceSettings', 'AKSReplicaStatus', 'AMLComputeConfiguration', 'APCloudConfiguration', 'Activate', 'AdditionalErrorInfo', 'AdhocTriggerScheduledCommandJobRequest', 'AdhocTriggerScheduledSparkJobRequest', 'AetherAPCloudConfiguration', 'AetherAmlDataset', 'AetherAmlSparkCloudSetting', 'AetherArgumentAssignment', 'AetherAssetDefinition', 
'AetherAssetOutputSettings', 'AetherAutoFeaturizeConfiguration', 'AetherAutoMLComponentConfiguration', 'AetherAutoTrainConfiguration', 'AetherAzureBlobReference', 'AetherAzureDataLakeGen2Reference', 'AetherAzureDataLakeReference', 'AetherAzureDatabaseReference', 'AetherAzureFilesReference', 'AetherBatchAiComputeInfo', 'AetherBuildArtifactInfo', 'AetherCloudBuildDropPathInfo', 'AetherCloudBuildInfo', 'AetherCloudBuildQueueInfo', 'AetherCloudPrioritySetting', 'AetherCloudSettings', 'AetherColumnTransformer', 'AetherComputeConfiguration', 'AetherComputeSetting', 'AetherControlInput', 'AetherControlOutput', 'AetherCopyDataTask', 'AetherCosmosReference', 'AetherCreatedBy', 'AetherCustomReference', 'AetherDBFSReference', 'AetherDataLocation', 'AetherDataLocationReuseCalculationFields', 'AetherDataPath', 'AetherDataReference', 'AetherDataSetDefinition', 'AetherDataSetDefinitionValue', 'AetherDataSettings', 'AetherDataTransferCloudConfiguration', 'AetherDataTransferSink', 'AetherDataTransferSource', 'AetherDataTransferV2CloudSetting', 'AetherDatabaseSink', 'AetherDatabaseSource', 'AetherDatabricksComputeInfo', 'AetherDatasetOutput', 'AetherDatasetOutputOptions', 'AetherDatasetRegistration', 'AetherDatastoreSetting', 'AetherDoWhileControlFlowInfo', 'AetherDoWhileControlFlowRunSettings', 'AetherDockerSettingConfiguration', 'AetherEntityInterfaceDocumentation', 'AetherEntrySetting', 'AetherEnvironmentConfiguration', 'AetherEsCloudConfiguration', 'AetherExportDataTask', 'AetherFeaturizationSettings', 'AetherFileSystem', 'AetherForecastHorizon', 'AetherForecastingSettings', 'AetherGeneralSettings', 'AetherGlobsOptions', 'AetherGraphControlNode', 'AetherGraphControlReferenceNode', 'AetherGraphDatasetNode', 'AetherGraphEdge', 'AetherGraphEntity', 'AetherGraphModuleNode', 'AetherGraphReferenceNode', 'AetherHdfsReference', 'AetherHdiClusterComputeInfo', 'AetherHdiRunConfiguration', 'AetherHyperDriveConfiguration', 'AetherIdentitySetting', 'AetherImportDataTask', 
'AetherInputSetting', 'AetherInteractiveConfig', 'AetherK8SConfiguration', 'AetherLegacyDataPath', 'AetherLimitSettings', 'AetherMlcComputeInfo', 'AetherModuleEntity', 'AetherModuleExtendedProperties', 'AetherNCrossValidations', 'AetherOutputSetting', 'AetherParallelForControlFlowInfo', 'AetherParameterAssignment', 'AetherPhillyHdfsReference', 'AetherPortInfo', 'AetherPriorityConfig', 'AetherPriorityConfiguration', 'AetherRegisteredDataSetReference', 'AetherRemoteDockerComputeInfo', 'AetherResourceAssignment', 'AetherResourceAttributeAssignment', 'AetherResourceAttributeDefinition', 'AetherResourceConfig', 'AetherResourceConfiguration', 'AetherResourceModel', 'AetherResourcesSetting', 'AetherSavedDataSetReference', 'AetherScopeCloudConfiguration', 'AetherSeasonality', 'AetherSqlDataPath', 'AetherStackEnsembleSettings', 'AetherStoredProcedureParameter', 'AetherStructuredInterface', 'AetherStructuredInterfaceInput', 'AetherStructuredInterfaceOutput', 'AetherStructuredInterfaceParameter', 'AetherSubGraphConfiguration', 'AetherSweepEarlyTerminationPolicy', 'AetherSweepSettings', 'AetherSweepSettingsLimits', 'AetherTargetLags', 'AetherTargetRollingWindowSize', 'AetherTargetSelectorConfiguration', 'AetherTestDataSettings', 'AetherTorchDistributedConfiguration', 'AetherTrainingOutput', 'AetherTrainingSettings', 'AetherUIAzureOpenAIDeploymentNameSelector', 'AetherUIAzureOpenAIModelCapabilities', 'AetherUIColumnPicker', 'AetherUIJsonEditor', 'AetherUIParameterHint', 'AetherUIPromptFlowConnectionSelector', 'AetherValidationDataSettings', 'AetherVsoBuildArtifactInfo', 'AetherVsoBuildDefinitionInfo', 'AetherVsoBuildInfo', 'AmlDataset', 'AmlK8SConfiguration', 'AmlK8SPriorityConfiguration', 'AmlSparkCloudSetting', 'ApiAndParameters', 'ApplicationEndpointConfiguration', 'ArgumentAssignment', 'Asset', 'AssetDefinition', 'AssetNameAndVersionIdentifier', 'AssetOutputSettings', 'AssetOutputSettingsParameter', 'AssetPublishResult', 'AssetPublishSingleRegionResult', 
'AssetTypeMetaInfo', 'AssetVersionPublishRequest', 'AssignedUser', 'AuthKeys', 'AutoClusterComputeSpecification', 'AutoDeleteSetting', 'AutoFeaturizeConfiguration', 'AutoMLComponentConfiguration', 'AutoScaler', 'AutoTrainConfiguration', 'AutologgerSettings', 'AvailabilityResponse', 'AzureBlobReference', 'AzureDataLakeGen2Reference', 'AzureDataLakeReference', 'AzureDatabaseReference', 'AzureFilesReference', 'AzureMLModuleVersionDescriptor', 'AzureOpenAIDeploymentDto', 'AzureOpenAIModelCapabilities', 'BatchAiComputeInfo', 'BatchDataInput', 'BatchExportComponentSpecResponse', 'BatchExportRawComponentResponse', 'BatchGetComponentHashesRequest', 'BatchGetComponentRequest', 'Binding', 'BulkTestDto', 'CloudError', 'CloudPrioritySetting', 'CloudSettings', 'ColumnTransformer', 'CommandJob', 'CommandJobLimits', 'CommandReturnCodeConfig', 'ComponentConfiguration', 'ComponentInput', 'ComponentJob', 'ComponentJobInput', 'ComponentJobOutput', 'ComponentNameAndDefaultVersion', 'ComponentNameMetaInfo', 'ComponentOutput', 'ComponentPreflightResult', 'ComponentSpecMetaInfo', 'ComponentUpdateRequest', 'ComponentValidationRequest', 'ComponentValidationResponse', 'Compute', 'ComputeConfiguration', 'ComputeContract', 'ComputeIdentityContract', 'ComputeIdentityDto', 'ComputeInfo', 'ComputeProperties', 'ComputeRPUserAssignedIdentity', 'ComputeRequest', 'ComputeSetting', 'ComputeStatus', 'ComputeStatusDetail', 'ComputeWarning', 'ConnectionConfigSpec', 'ConnectionDto', 'ConnectionEntity', 'ConnectionOverrideSetting', 'ConnectionSpec', 'ContainerInstanceConfiguration', 'ContainerRegistry', 'ContainerResourceRequirements', 'ControlInput', 'ControlOutput', 'CopyDataTask', 'CreateFlowFromSampleRequest', 'CreateFlowRequest', 'CreateFlowRuntimeRequest', 'CreateFlowSessionRequest', 'CreateInferencePipelineRequest', 'CreateOrUpdateConnectionRequest', 'CreateOrUpdateConnectionRequestDto', 'CreatePipelineDraftRequest', 'CreatePipelineJobScheduleDto', 'CreatePublishedPipelineRequest', 
'CreateRealTimeEndpointRequest', 'CreatedBy', 'CreatedFromDto', 'CreationContext', 'Cron', 'CustomConnectionConfig', 'CustomReference', 'DBFSReference', 'Data', 'DataInfo', 'DataLocation', 'DataPath', 'DataPathParameter', 'DataPortDto', 'DataReference', 'DataReferenceConfiguration', 'DataSetDefinition', 'DataSetDefinitionValue', 'DataSetPathParameter', 'DataSettings', 'DataTransferCloudConfiguration', 'DataTransferSink', 'DataTransferSource', 'DataTransferV2CloudSetting', 'DataTypeCreationInfo', 'DatabaseSink', 'DatabaseSource', 'DatabricksComputeInfo', 'DatabricksConfiguration', 'DatacacheConfiguration', 'DatasetIdentifier', 'DatasetInputDetails', 'DatasetLineage', 'DatasetOutput', 'DatasetOutputDetails', 'DatasetOutputOptions', 'DatasetRegistration', 'DatasetRegistrationOptions', 'DatastoreSetting', 'DbfsStorageInfoDto', 'DebugInfoResponse', 'DeployFlowRequest', 'DeploymentInfo', 'DistributionConfiguration', 'DistributionParameter', 'DoWhileControlFlowInfo', 'DoWhileControlFlowRunSettings', 'DockerBuildContext', 'DockerConfiguration', 'DockerImagePlatform', 'DockerSection', 'DockerSettingConfiguration', 'DownloadResourceInfo', 'EPRPipelineRunErrorClassificationRequest', 'EndpointSetting', 'EntityInterface', 'EntrySetting', 'EnumParameterRule', 'EnvironmentConfiguration', 'EnvironmentDefinition', 'EnvironmentDefinitionDto', 'ErrorAdditionalInfo', 'ErrorResponse', 'EsCloudConfiguration', 'EvaluationFlowRunSettings', 'ExampleRequest', 'ExecutionContextDto', 'ExecutionDataLocation', 'ExecutionDataPath', 'ExecutionGlobsOptions', 'ExperimentComputeMetaInfo', 'ExperimentInfo', 'ExportComponentMetaInfo', 'ExportDataTask', 'FeaturizationSettings', 'FeedDto', 'FeedDtoSupportedAssetTypes', 'FileSystem', 'Flow', 'FlowAnnotations', 'FlowBaseDto', 'FlowDto', 'FlowEnvironment', 'FlowFeature', 'FlowFeatureState', 'FlowGraph', 'FlowGraphAnnotationNode', 'FlowGraphLayout', 'FlowGraphReference', 'FlowIndexEntity', 'FlowInputDefinition', 'FlowNode', 'FlowNodeLayout', 
'FlowNodeVariant', 'FlowOutputDefinition', 'FlowProperties', 'FlowRunBasePath', 'FlowRunInfo', 'FlowRunResult', 'FlowRunSettings', 'FlowRuntimeCapability', 'FlowRuntimeDto', 'FlowSampleDto', 'FlowSessionDto', 'FlowSnapshot', 'FlowSubmitRunSettings', 'FlowTestInfo', 'FlowTestStorageSetting', 'FlowToolSettingParameter', 'FlowToolsDto', 'FlowVariantNode', 'ForecastHorizon', 'ForecastingSettings', 'GeneralSettings', 'GeneratePipelineComponentRequest', 'GenerateToolMetaRequest', 'GetDynamicListRequest', 'GetRunDataResultDto', 'GetTrainingSessionDto', 'GlobalJobDispatcherConfiguration', 'GlobsOptions', 'GraphAnnotationNode', 'GraphControlNode', 'GraphControlReferenceNode', 'GraphDatasetNode', 'GraphDraftEntity', 'GraphEdge', 'GraphLayout', 'GraphLayoutCreationInfo', 'GraphModuleNode', 'GraphModuleNodeRunSetting', 'GraphModuleNodeUIInputSetting', 'GraphNodeStatusInfo', 'GraphReferenceNode', 'HdfsReference', 'HdiClusterComputeInfo', 'HdiConfiguration', 'HdiRunConfiguration', 'HistoryConfiguration', 'HyperDriveConfiguration', 'ICheckableLongRunningOperationResponse', 'IdentityConfiguration', 'IdentitySetting', 'ImportDataTask', 'IndexedErrorResponse', 'InitScriptInfoDto', 'InnerErrorDetails', 'InnerErrorResponse', 'InputAsset', 'InputData', 'InputDataBinding', 'InputDefinition', 'InputOutputPortMetadata', 'InputSetting', 'IntellectualPropertyPublisherInformation', 'InteractiveConfig', 'InteractiveConfiguration', 'JobCost', 'JobEndpoint', 'JobInput', 'JobOutput', 'JobOutputArtifacts', 'JobScheduleDto', 'K8SConfiguration', 'KeyValuePairComponentNameMetaInfoErrorResponse', 'KeyValuePairComponentNameMetaInfoModuleDto', 'KeyValuePairStringObject', 'KubernetesConfiguration', 'Kwarg', 'LegacyDataPath', 'LimitSettings', 'LinkedADBWorkspaceMetadata', 'LinkedPipelineInfo', 'LoadFlowAsComponentRequest', 'LogRunTerminatedEventDto', 'LongRunningOperationUriResponse', 'LongRunningUpdateRegistryComponentRequest', 'ManagedServiceIdentity', 'MavenLibraryDto', 'MetricProperties', 
'MetricSchemaDto', 'MetricSchemaPropertyDto', 'MetricV2Dto', 'MetricV2Value', 'MfeInternalAutologgerSettings', 'MfeInternalIdentityConfiguration', 'MfeInternalNodes', 'MfeInternalOutputData', 'MfeInternalSecretConfiguration', 'MfeInternalUriReference', 'MfeInternalV20211001ComponentJob', 'MinMaxParameterRule', 'MlcComputeInfo', 'ModelDto', 'ModelManagementErrorResponse', 'ModifyPipelineJobScheduleDto', 'ModuleDto', 'ModuleDtoWithErrors', 'ModuleDtoWithValidateStatus', 'ModuleEntity', 'ModulePythonInterface', 'MpiConfiguration', 'NCrossValidations', 'Node', 'NodeInputPort', 'NodeLayout', 'NodeOutputPort', 'NodePortInterface', 'NodeSource', 'NodeTelemetryMetaInfo', 'NodeVariant', 'Nodes', 'NoteBookTaskDto', 'NotificationSetting', 'ODataError', 'ODataErrorDetail', 'ODataErrorResponse', 'ODataInnerError', 'OutputData', 'OutputDataBinding', 'OutputDatasetLineage', 'OutputDefinition', 'OutputOptions', 'OutputSetting', 'OutputSettingSpec', 'PaginatedDataInfoList', 'PaginatedModelDtoList', 'PaginatedModuleDtoList', 'PaginatedPipelineDraftSummaryList', 'PaginatedPipelineEndpointSummaryList', 'PaginatedPipelineRunSummaryList', 'PaginatedPublishedPipelineSummaryList', 'ParallelForControlFlowInfo', 'ParallelTaskConfiguration', 'Parameter', 'ParameterAssignment', 'ParameterDefinition', 'PatchFlowRequest', 'Pipeline', 'PipelineDraft', 'PipelineDraftStepDetails', 'PipelineDraftSummary', 'PipelineEndpoint', 'PipelineEndpointSummary', 'PipelineGraph', 'PipelineInput', 'PipelineJob', 'PipelineJobRuntimeBasicSettings', 'PipelineJobScheduleDto', 'PipelineOutput', 'PipelineRun', 'PipelineRunGraphDetail', 'PipelineRunGraphStatus', 'PipelineRunProfile', 'PipelineRunStatus', 'PipelineRunStepDetails', 'PipelineRunSummary', 'PipelineStatus', 'PipelineStepRun', 'PipelineStepRunOutputs', 'PipelineSubDraft', 'PolicyValidationResponse', 'PortInfo', 'PortOutputInfo', 'PriorityConfig', 'PriorityConfiguration', 'PromoteDataSetRequest', 'ProviderEntity', 'PublishedPipeline', 
'PublishedPipelineSummary', 'PyTorchConfiguration', 'PythonInterfaceMapping', 'PythonPyPiOrRCranLibraryDto', 'PythonSection', 'QueueingInfo', 'RCranPackage', 'RGitHubPackage', 'RSection', 'RawComponentDto', 'RayConfiguration', 'RealTimeEndpoint', 'RealTimeEndpointInfo', 'RealTimeEndpointStatus', 'RealTimeEndpointSummary', 'RealTimeEndpointTestRequest', 'Recurrence', 'RecurrencePattern', 'RecurrenceSchedule', 'RegenerateServiceKeysRequest', 'RegisterComponentMetaInfo', 'RegisterComponentMetaInfoExtraHashes', 'RegisterComponentMetaInfoIdentifierHashes', 'RegisterRegistryComponentMetaInfo', 'RegisterRegistryComponentMetaInfoExtraHashes', 'RegisterRegistryComponentMetaInfoIdentifierHashes', 'RegisteredDataSetReference', 'RegistrationOptions', 'RegistryBlobReferenceData', 'RegistryIdentity', 'Relationship', 'RemoteDockerComputeInfo', 'ResourceConfig', 'ResourceConfiguration', 'ResourcesSetting', 'RetrieveToolFuncResultRequest', 'RetryConfiguration', 'RootError', 'RunAnnotations', 'RunConfiguration', 'RunDatasetReference', 'RunDefinition', 'RunDetailsDto', 'RunDetailsWarningDto', 'RunDto', 'RunIndexEntity', 'RunIndexMetricSummary', 'RunIndexMetricSummarySystemObject', 'RunIndexResourceMetricSummary', 'RunMetricDto', 'RunMetricsTypesDto', 'RunProperties', 'RunSettingParameter', 'RunSettingParameterAssignment', 'RunSettingUIParameterHint', 'RunStatusPeriod', 'RunTypeV2', 'RunTypeV2Index', 'RuntimeConfiguration', 'SampleMeta', 'SavePipelineDraftRequest', 'SavedDataSetReference', 'ScheduleBase', 'SchemaContractsCreatedBy', 'ScopeCloudConfiguration', 'Seasonality', 'SecretConfiguration', 'SegmentedResult1', 'ServiceLogRequest', 'SessionApplication', 'SessionApplicationRunCommandResult', 'SessionProperties', 'SetupFlowSessionRequest', 'SharingScope', 'Snapshot', 'SnapshotInfo', 'SourceCodeDataReference', 'SparkConfiguration', 'SparkJarTaskDto', 'SparkJob', 'SparkJobEntry', 'SparkMavenPackage', 'SparkPythonTaskDto', 'SparkResourceConfiguration', 'SparkSection', 
'SparkSubmitTaskDto', 'SqlDataPath', 'StackEnsembleSettings', 'StandbyPoolProperties', 'StandbyPoolResourceStatus', 'StartRunResult', 'StepRunProfile', 'StorageInfo', 'StoredProcedureParameter', 'Stream', 'StructuredInterface', 'StructuredInterfaceInput', 'StructuredInterfaceOutput', 'StructuredInterfaceParameter', 'StudioMigrationInfo', 'SubGraphConcatenateAssignment', 'SubGraphConfiguration', 'SubGraphConnectionInfo', 'SubGraphDataPathParameterAssignment', 'SubGraphInfo', 'SubGraphParameterAssignment', 'SubGraphPortInfo', 'SubPipelineDefinition', 'SubPipelineParameterAssignment', 'SubPipelinesInfo', 'SubStatusPeriod', 'SubmitBulkRunRequest', 'SubmitBulkRunResponse', 'SubmitFlowRequest', 'SubmitPipelineRunRequest', 'SweepEarlyTerminationPolicy', 'SweepSettings', 'SweepSettingsLimits', 'SystemData', 'SystemMeta', 'SystemMetaExtraHashes', 'SystemMetaIdentifierHashes', 'TargetLags', 'TargetRollingWindowSize', 'TargetSelectorConfiguration', 'Task', 'TaskControlFlowInfo', 'TaskReuseInfo', 'TensorflowConfiguration', 'TestDataSettings', 'Tool', 'ToolFuncResponse', 'ToolInputDynamicList', 'ToolInputGeneratedBy', 'ToolMetaDto', 'ToolSetting', 'ToolSourceMeta', 'TorchDistributedConfiguration', 'TrainingDiagnosticConfiguration', 'TrainingOutput', 'TrainingSettings', 'TriggerAsyncOperationStatus', 'TuningNodeSetting', 'TypedAssetReference', 'UIAzureOpenAIDeploymentNameSelector', 'UIAzureOpenAIModelCapabilities', 'UIColumnPicker', 'UIComputeSelection', 'UIHyperparameterConfiguration', 'UIInputSetting', 'UIJsonEditor', 'UIParameterHint', 'UIPromptFlowConnectionSelector', 'UIWidgetMetaInfo', 'UIYamlEditor', 'UnversionedEntityRequestDto', 'UnversionedEntityResponseDto', 'UnversionedRebuildIndexDto', 'UnversionedRebuildResponseDto', 'UpdateComponentRequest', 'UpdateFlowRequest', 'UpdateFlowRuntimeRequest', 'UpdateRegistryComponentRequest', 'UploadOptions', 'UriReference', 'User', 'UserAssignedIdentity', 'ValidationDataSettings', 'VariantNode', 'WebServiceComputeMetaInfo', 
'WebServicePort', 'Webhook', 'WorkspaceConnectionSpec', 'AEVAAssetType', 'AEVADataStoreMode', 'AEVAIdentityType', 'ActionType', 'AetherArgumentValueType', 'AetherAssetType', 'AetherBuildSourceType', 'AetherComputeType', 'AetherControlFlowType', 'AetherControlInputValue', 'AetherDataCopyMode', 'AetherDataLocationStorageType', 'AetherDataReferenceType', 'AetherDataStoreMode', 'AetherDataTransferStorageType', 'AetherDataTransferTaskType', 'AetherDatasetType', 'AetherEarlyTerminationPolicyType', 'AetherEntityStatus', 'AetherExecutionEnvironment', 'AetherExecutionPhase', 'AetherFeaturizationMode', 'AetherFileBasedPathType', 'AetherForecastHorizonMode', 'AetherIdentityType', 'AetherLogVerbosity', 'AetherModuleDeploymentSource', 'AetherModuleHashVersion', 'AetherModuleType', 'AetherNCrossValidationMode', 'AetherParameterType', 'AetherParameterValueType', 'AetherPrimaryMetrics', 'AetherRepositoryType', 'AetherResourceOperator', 'AetherResourceValueType', 'AetherSamplingAlgorithmType', 'AetherSeasonalityMode', 'AetherShortSeriesHandlingConfiguration', 'AetherStackMetaLearnerType', 'AetherStoredProcedureParameterType', 'AetherTabularTrainingMode', 'AetherTargetAggregationFunction', 'AetherTargetLagsMode', 'AetherTargetRollingWindowSizeMode', 'AetherTaskType', 'AetherTrainingOutputType', 'AetherUIScriptLanguageEnum', 'AetherUIWidgetTypeEnum', 'AetherUploadState', 'AetherUseStl', 'ApplicationEndpointType', 'ArgumentValueType', 'AssetScopeTypes', 'AssetSourceType', 'AssetType', 'AutoDeleteCondition', 'BuildContextLocationType', 'Communicator', 'ComponentRegistrationTypeEnum', 'ComponentType', 'ComputeEnvironmentType', 'ComputeTargetType', 'ComputeType', 'ConfigValueType', 'ConnectionCategory', 'ConnectionScope', 'ConnectionSourceType', 'ConnectionType', 'ConsumeMode', 'ControlFlowType', 'ControlInputValue', 'DataBindingMode', 'DataCategory', 'DataCopyMode', 'DataLocationStorageType', 'DataPortType', 'DataReferenceType', 'DataSourceType', 'DataStoreMode', 
'DataTransferStorageType', 'DataTransferTaskType', 'DataTypeMechanism', 'DatasetAccessModes', 'DatasetConsumptionType', 'DatasetDeliveryMechanism', 'DatasetOutputType', 'DatasetType', 'DeliveryMechanism', 'DistributionParameterEnum', 'DistributionType', 'EarlyTerminationPolicyType', 'EmailNotificationEnableType', 'EndpointAuthMode', 'EntityKind', 'EntityStatus', 'ErrorHandlingMode', 'ExecutionPhase', 'FeaturizationMode', 'FlowFeatureStateEnum', 'FlowLanguage', 'FlowPatchOperationType', 'FlowRunMode', 'FlowRunTypeEnum', 'FlowRuntimeSubmissionApiVersion', 'FlowTestMode', 'FlowType', 'ForecastHorizonMode', 'Framework', 'Frequency', 'GlobalJobDispatcherSupportedComputeType', 'GraphComponentsMode', 'GraphDatasetsLoadModes', 'GraphSdkCodeType', 'HttpStatusCode', 'IdentityType', 'InputType', 'IntellectualPropertyAccessMode', 'JobInputType', 'JobLimitsType', 'JobOutputType', 'JobProvisioningState', 'JobStatus', 'JobType', 'KeyType', 'ListViewType', 'LogLevel', 'LogVerbosity', 'LongRunningUpdateType', 'MLFlowAutologgerState', 'ManagedServiceIdentityType', 'MetricValueType', 'MfeInternalIdentityType', 'MfeInternalMLFlowAutologgerState', 'MfeInternalScheduleStatus', 'ModuleDtoFields', 'ModuleInfoFromYamlStatusEnum', 'ModuleRunSettingTypes', 'ModuleScope', 'ModuleSourceType', 'ModuleType', 'ModuleUpdateOperationType', 'ModuleWorkingMechanism', 'NCrossValidationMode', 'NodeCompositionMode', 'NodesValueType', 'Orientation', 'OutputMechanism', 'ParameterType', 'ParameterValueType', 'PipelineDraftMode', 'PipelineRunStatusCode', 'PipelineStatusCode', 'PipelineType', 'PortAction', 'PrimaryMetrics', 'ProvisioningState', 'RealTimeEndpointInternalStepCode', 'RealTimeEndpointOpCode', 'RealTimeEndpointOpStatusCode', 'RecurrenceFrequency', 'RunDisplayNameGenerationType', 'RunSettingParameterType', 'RunSettingUIWidgetTypeEnum', 'RunStatus', 'RunType', 'RuntimeStatusEnum', 'RuntimeType', 'SamplingAlgorithmType', 'ScheduleProvisioningStatus', 'ScheduleStatus', 'ScheduleType', 'ScopeType', 
'ScriptType', 'SeasonalityMode', 'Section', 'SessionSetupModeEnum', 'SetupFlowSessionAction', 'SeverityLevel', 'ShortSeriesHandlingConfiguration', 'StackMetaLearnerType', 'StorageAuthType', 'StoredProcedureParameterType', 'SuccessfulCommandReturnCode', 'TabularTrainingMode', 'TargetAggregationFunction', 'TargetLagsMode', 'TargetRollingWindowSizeMode', 'TaskCreationOptions', 'TaskStatus', 'TaskStatusCode', 'TaskType', 'ToolFuncCallScenario', 'ToolState', 'ToolType', 'TrainingOutputType', 'TriggerOperationType', 'TriggerType', 'UIInputDataDeliveryMode', 'UIScriptLanguageEnum', 'UIWidgetTypeEnum', 'UploadState', 'UseStl', 'UserType', 'ValidationStatus', 'ValueType', 'VmPriority', 'WebServiceState', 'WeekDays', 'Weekday', 'YarnDeployMode', ]
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_constants/_component.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- from pathlib import Path RESOURCE_FOLDER = Path(__file__).parent.parent / "resources" COMMAND_COMPONENT_SPEC_TEMPLATE = RESOURCE_FOLDER / "component_spec_template.yaml" DEFAULT_PYTHON_VERSION = "3.9"
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_constants/_flow.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- class FlowType: STANDARD = "standard" CHAT = "chat" EVALUATION = "evaluate" class FlowJobType: STANDARD = "azureml.promptflow.FlowRun" EVALUATION = "azureml.promptflow.EvaluationRun" # Use this storage since it's the storage used by notebook DEFAULT_STORAGE = "workspaceworkingdirectory" PROMPTFLOW_FILE_SHARE_DIR = "promptflow" CLOUD_RUNS_PAGE_SIZE = 25 # align with UX SESSION_CREATION_TIMEOUT_SECONDS = 10 * 60 # 10 minutes SESSION_CREATION_TIMEOUT_ENV_VAR = "PROMPTFLOW_SESSION_CREATION_TIMEOUT_SECONDS" ENVIRONMENT = "environment" PYTHON_REQUIREMENTS_TXT = "python_requirements_txt" ADDITIONAL_INCLUDES = "additional_includes" BASE_IMAGE = "image" AUTOMATIC_RUNTIME_NAME = "automatic" AUTOMATIC_RUNTIME = "automatic runtime"
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_constants/__init__.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore from ._component import COMMAND_COMPONENT_SPEC_TEMPLATE, DEFAULT_PYTHON_VERSION from ._flow import FlowJobType, FlowType __all__ = ["FlowJobType", "FlowType", "DEFAULT_PYTHON_VERSION", "COMMAND_COMPONENT_SPEC_TEMPLATE"]
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/_flow_operations.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- # pylint: disable=protected-access import copy import json import os import re from datetime import datetime from functools import cached_property from pathlib import Path from typing import Dict, List, Optional, Union import requests from azure.ai.ml._artifacts._artifact_utilities import _check_and_upload_path from azure.ai.ml._scope_dependent_operations import ( OperationConfig, OperationsContainer, OperationScope, _ScopeDependentOperations, ) from azure.ai.ml.constants._common import SHORT_URI_FORMAT from azure.ai.ml.entities import Workspace from azure.ai.ml.operations._operation_orchestrator import OperationOrchestrator from azure.core.exceptions import HttpResponseError from promptflow._sdk._constants import ( CLIENT_FLOW_TYPE_2_SERVICE_FLOW_TYPE, DAG_FILE_NAME, MAX_LIST_CLI_RESULTS, WORKSPACE_LINKED_DATASTORE_NAME, FlowType, ListViewType, ) from promptflow._sdk._errors import FlowOperationError from promptflow._sdk._telemetry import ActivityType, WorkspaceTelemetryMixin, monitor_operation from promptflow._sdk._utils import PromptflowIgnoreFile from promptflow._sdk._vendor._asset_utils import traverse_directory from promptflow._utils.logger_utils import get_cli_sdk_logger from promptflow.azure._constants._flow import DEFAULT_STORAGE from promptflow.azure._entities._flow import Flow from promptflow.azure._load_functions import load_flow from promptflow.azure._restclient.flow_service_caller import FlowServiceCaller from promptflow.azure.operations._artifact_utilities import _get_datastore_name, get_datastore_info from promptflow.azure.operations._fileshare_storeage_helper import FlowFileStorageClient from promptflow.exceptions import SystemErrorException, UserErrorException logger = get_cli_sdk_logger() class FlowOperations(WorkspaceTelemetryMixin, _ScopeDependentOperations): 
"""FlowOperations that can manage flows. You should not instantiate this class directly. Instead, you should create a :class:`~promptflow.azure.PFClient` instance and this operation is available as the instance's attribute. """ _FLOW_RESOURCE_PATTERN = re.compile(r"azureml:.*?/workspaces/(?P<experiment_id>.*?)/flows/(?P<flow_id>.*?)$") def __init__( self, operation_scope: OperationScope, operation_config: OperationConfig, all_operations: OperationsContainer, credential, service_caller: FlowServiceCaller, workspace: Workspace, **kwargs: Dict, ): super().__init__( operation_scope=operation_scope, operation_config=operation_config, workspace_name=operation_scope.workspace_name, subscription_id=operation_scope.subscription_id, resource_group_name=operation_scope.resource_group_name, ) self._all_operations = all_operations self._service_caller = service_caller self._credential = credential self._workspace = workspace @cached_property def _workspace_id(self): return self._workspace._workspace_id @cached_property def _index_service_endpoint_url(self): """Get the endpoint url for the workspace.""" endpoint = self._service_caller._service_endpoint return endpoint + "index/v1.0" + self._service_caller._common_azure_url_pattern @monitor_operation(activity_name="pfazure.flows.create_or_update", activity_type=ActivityType.PUBLICAPI) def create_or_update(self, flow: Union[str, Path], display_name=None, type=None, **kwargs) -> Flow: """Create a flow to remote from local source, or update the metadata of an existing flow. .. note:: Functionality of updating flow metadata is yet to be supported. :param flow: The source of the flow to create. :type flow: Union[str, Path] :param display_name: The display name of the flow to create. Default to be flow folder name + timestamp if not specified. e.g. "web-classification-10-27-2023-14-19-10" :type display_name: str :param type: The type of the flow to create. One of ["standard", evaluation", "chat"]. 
Default to be "standard" if not specified. :type type: str :param description: The description of the flow to create. Default to be the description in flow yaml file. :type description: str :param tags: The tags of the flow to create. Default to be the tags in flow yaml file. :type tags: Dict[str, str] """ # validate the parameters azure_flow, flow_display_name, flow_type, kwargs = FlowOperations._validate_flow_creation_parameters( flow, display_name, type, **kwargs ) # upload to file share file_share_flow_path = self._resolve_flow_code_and_upload_to_file_share(flow=azure_flow) if not file_share_flow_path: raise FlowOperationError(f"File share path should not be empty, got {file_share_flow_path!r}.") # create flow to remote flow_definition_file_path = f"{file_share_flow_path}/{DAG_FILE_NAME}" rest_flow = self._create_remote_flow_via_file_share_path( flow_display_name=flow_display_name, flow_type=flow_type, flow_definition_file_path=flow_definition_file_path, **kwargs, ) result_flow = Flow._from_pf_service(rest_flow) flow_dict = result_flow._to_dict() print(f"Flow created successfully:\n{json.dumps(flow_dict, indent=4)}") return result_flow @staticmethod def _validate_flow_creation_parameters(source, flow_display_name=None, flow_type=None, **kwargs): """Validate the parameters for flow creation operation.""" # validate the source folder logger.info("Validating flow source.") if not Path(source, DAG_FILE_NAME).exists(): raise UserErrorException( f"Flow source must be a directory with flow definition yaml '{DAG_FILE_NAME}'. " f"Got {Path(source).resolve().as_posix()!r}." 
) # validate flow source with flow schema logger.info("Validating flow schema.") flow_dict = FlowOperations._validate_flow_schema(source, flow_display_name, flow_type, **kwargs) logger.info("Validating flow creation parameters.") flow = load_flow(source) # if no flow name specified, use "flow name + timestamp" flow_display_name = flow_dict.get("display_name", None) if not flow_display_name: flow_display_name = f"{Path(source).name}-{datetime.now().strftime('%m-%d-%Y-%H-%M-%S')}" # if no flow type specified, use default flow type "standard" flow_type = flow_dict.get("type", None) if not flow_type: flow_type = FlowType.STANDARD # update description and tags to be the final value description = flow_dict.get("description", None) if isinstance(description, str): kwargs["description"] = description tags = flow_dict.get("tags", None) if tags: kwargs["tags"] = tags return flow, flow_display_name, flow_type, kwargs @staticmethod def _validate_flow_schema(source, display_name=None, type=None, **kwargs): """Validate the flow schema.""" from promptflow._sdk.entities._flow import ProtectedFlow params_override = copy.deepcopy(kwargs) if display_name is not None: params_override["display_name"] = display_name if type is not None: params_override["type"] = type flow_entity = ProtectedFlow.load(source=source, params_override=params_override) flow_entity._validate(raise_error=True) # raise error if validation failed flow_dict = flow_entity._dump_for_validation() return flow_dict def _resolve_flow_code_and_upload_to_file_share(self, flow: Flow, ignore_tools_json=False) -> str: remote_file_share_folder_name = f"{Path(flow.code).name}-{datetime.now().strftime('%m-%d-%Y-%H-%M-%S')}" ops = OperationOrchestrator(self._all_operations, self._operation_scope, self._operation_config) file_share_flow_path = "" logger.info("Building flow code.") with flow._build_code() as code: if code is None: raise FlowOperationError("Failed to build flow code.") # ignore flow.tools.json if needed (e.g. 
for flow run scenario) if ignore_tools_json: ignore_file = code._ignore_file if isinstance(ignore_file, PromptflowIgnoreFile): ignore_file._ignore_tools_json = ignore_tools_json else: raise FlowOperationError( message=f"Flow code should have PromptflowIgnoreFile, got {type(ignore_file)}" ) code.datastore = DEFAULT_STORAGE datastore_name = _get_datastore_name(datastore_name=DEFAULT_STORAGE) datastore_operation = ops._code_assets._datastore_operation datastore_info = get_datastore_info(datastore_operation, datastore_name) logger.debug("Creating storage client for uploading flow to file share.") storage_client = FlowFileStorageClient( credential=datastore_info["credential"], file_share_name=datastore_info["container_name"], account_url=datastore_info["account_url"], azure_cred=datastore_operation._credential, ) # set storage client to flow operation, can be used in test case self._storage_client = storage_client # check if the file share directory exists logger.debug("Checking if the file share directory exists.") if storage_client._check_file_share_directory_exist(remote_file_share_folder_name): raise FlowOperationError( f"Remote flow folder {remote_file_share_folder_name!r} already exists under " f"'{storage_client.file_share_prefix}'. Please change the flow folder name and try again." 
) try: logger.info("Uploading flow directory to file share.") storage_client.upload_dir( source=code.path, dest=remote_file_share_folder_name, msg="test", ignore_file=code._ignore_file, show_progress=False, ) except Exception as e: raise FlowOperationError(f"Failed to upload flow to file share due to: {str(e)}.") from e file_share_flow_path = f"{storage_client.file_share_prefix}/{remote_file_share_folder_name}" logger.info(f"Successfully uploaded flow to file share path {file_share_flow_path!r}.") return file_share_flow_path def _create_remote_flow_via_file_share_path( self, flow_display_name, flow_type, flow_definition_file_path, **kwargs ): """Create a flow to remote from file share path.""" service_flow_type = CLIENT_FLOW_TYPE_2_SERVICE_FLOW_TYPE[flow_type] description = kwargs.get("description", None) tags = kwargs.get("tags", None) body = { "flow_name": flow_display_name, "flow_definition_file_path": flow_definition_file_path, "flow_type": service_flow_type, "description": description, "tags": tags, } rest_flow_result = self._service_caller.create_flow( subscription_id=self._operation_scope.subscription_id, resource_group_name=self._operation_scope.resource_group_name, workspace_name=self._operation_scope.workspace_name, body=body, ) return rest_flow_result def get(self, name: str) -> Flow: """Get a flow from azure. :param name: The name of the flow to get. :type name: str :return: The flow. 
:rtype: ~promptflow.azure.entities.Flow """ try: rest_flow = self._service_caller.get_flow( subscription_id=self._operation_scope.subscription_id, resource_group_name=self._operation_scope.resource_group_name, workspace_name=self._operation_scope.workspace_name, flow_id=name, experiment_id=self._workspace_id, # for flow operations, current experiment id is workspace id ) except HttpResponseError as e: if e.status_code == 404: raise FlowOperationError(f"Flow {name!r} not found.") from e else: raise FlowOperationError(f"Failed to get flow {name!r} due to: {str(e)}.") from e flow = Flow._from_pf_service(rest_flow) return flow @monitor_operation(activity_name="pfazure.flows.list", activity_type=ActivityType.PUBLICAPI) def list( self, max_results: int = MAX_LIST_CLI_RESULTS, flow_type: Optional[FlowType] = None, list_view_type: ListViewType = ListViewType.ACTIVE_ONLY, include_others: bool = False, **kwargs, ) -> List[Flow]: """List flows from azure. :param max_results: The max number of runs to return, defaults to 50, max is 100 :type max_results: int :param flow_type: The flow type, defaults to None, which means all flow types. Other supported flow types are ["standard", "evaluation", "chat"]. :type flow_type: Optional[FlowType] :param list_view_type: The list view type, defaults to ListViewType.ACTIVE_ONLY :type list_view_type: ListViewType :param include_others: Whether to list flows owned by other users in the remote workspace, defaults to False :type include_others: bool :return: The list of flows. 
        :rtype: List[~promptflow.azure.entities.Flow]
        """
        if not isinstance(max_results, int) or max_results < 1:
            raise FlowOperationError(f"'max_results' must be a positive integer, got {max_results!r}")

        normalized_flow_type = str(flow_type).lower()
        if flow_type is not None and normalized_flow_type not in FlowType.get_all_values():
            raise FlowOperationError(f"'flow_type' must be one of {FlowType.get_all_values()}, got {flow_type!r}.")

        headers = self._service_caller._get_headers()
        # map the list view type onto the index service's "isArchived" annotation filter
        if list_view_type == ListViewType.ACTIVE_ONLY:
            filter_archived = ["false"]
        elif list_view_type == ListViewType.ARCHIVED_ONLY:
            filter_archived = ["true"]
        elif list_view_type == ListViewType.ALL:
            filter_archived = ["true", "false"]
        else:
            raise FlowOperationError(
                f"Invalid list view type: {list_view_type!r}, expecting one of ['ActiveOnly', 'ArchivedOnly', 'All']"
            )

        user_object_id, user_tenant_id = self._service_caller._get_user_identity_info()
        # query payload for the index service; results are always scoped to the caller's tenant
        payload = {
            "filters": [
                {"field": "type", "operator": "eq", "values": ["flows"]},
                {"field": "annotations/isArchived", "operator": "eq", "values": filter_archived},
                {
                    "field": "properties/creationContext/createdBy/userTenantId",
                    "operator": "eq",
                    "values": [user_tenant_id],
                },
            ],
            "freeTextSearch": "",
            "order": [{"direction": "Desc", "field": "properties/creationContext/createdTime"}],
            # index service can return 100 results at most
            "pageSize": min(max_results, 100),
            "skip": 0,
            "includeTotalResultCount": True,
            "searchBuilder": "AppendPrefix",
        }

        # add flow filter to only list flows from current user
        if not include_others:
            payload["filters"].append(
                {
                    "field": "properties/creationContext/createdBy/userObjectId",
                    "operator": "eq",
                    "values": [user_object_id],
                }
            )

        endpoint = self._index_service_endpoint_url
        url = endpoint + "/entities"
        response = requests.post(url, headers=headers, json=payload)
        if response.status_code == 200:
            entities = json.loads(response.text)
            flow_entities = entities["value"]
        else:
            raise FlowOperationError(
                f"Failed to get flows from index service. Code: {response.status_code}, text: {response.text}"
            )

        # transform to flow instances
        flow_instances = []
        for entity in flow_entities:
            flow = Flow._from_index_service(entity)
            flow_instances.append(flow)

        return flow_instances

    def _download(self, source, dest):
        """Download a flow from remote storage to ``dest``. Not implemented yet."""
        # TODO: support download flow
        raise NotImplementedError("Not implemented yet")

    def _resolve_arm_id_or_upload_dependencies(self, flow: Flow, ignore_tools_json=False) -> None:
        """Ensure the flow's code is available remotely, uploading it if needed.

        :param flow: The flow whose code should be resolved/uploaded.
        :param ignore_tools_json: Whether to exclude the tools json from the upload.
        """
        ops = OperationOrchestrator(self._all_operations, self._operation_scope, self._operation_config)
        # resolve flow's code
        self._try_resolve_code_for_flow(flow=flow, ops=ops, ignore_tools_json=ignore_tools_json)

    @classmethod
    def _try_resolve_code_for_flow(cls, flow: Flow, ops: OperationOrchestrator, ignore_tools_json=False) -> None:
        """Upload the flow's code folder to the workspace linked datastore.

        On success, rewrites ``flow.code``/``flow.path`` to point at the uploaded
        location and sets ``flow._code_uploaded``. A flow whose path already starts
        with ``azureml://datastores`` is treated as already uploaded.
        """
        if flow.path:
            # remote path
            if flow.path.startswith("azureml://datastores"):
                flow._code_uploaded = True
                return
        else:
            raise ValueError("Path is required for flow.")

        with flow._build_code() as code:
            if code is None:
                return
            if flow._code_uploaded:
                return

            # TODO(2917889): generate flow meta for eager flow
            if ignore_tools_json:
                ignore_file = code._ignore_file
                if isinstance(ignore_file, PromptflowIgnoreFile):
                    ignore_file._ignore_tools_json = ignore_tools_json
                else:
                    raise SystemErrorException(
                        message=f"Flow code should have PromptflowIgnoreFile, got {type(ignore_file)}"
                    )

            # flow directory per file upload summary
            # as the upload logic locates in azure-ai-ml, we cannot touch during the upload
            # copy the logic here to print per file upload summary
            ignore_file = code._ignore_file
            upload_paths = []
            source_path = Path(code.path).resolve()
            prefix = os.path.basename(source_path) + "/"
            for root, _, files in os.walk(source_path, followlinks=True):
                upload_paths += list(
                    traverse_directory(
                        root,
                        files,
                        prefix=prefix,
                        ignore_file=ignore_file,
                    )
                )

            ignore_files = code._ignore_file._get_ignore_list()
            for file_path in ignore_files:
                logger.debug(f"will ignore file: {file_path}...")
            for file_path, _ in upload_paths:
                logger.debug(f"will upload file: {file_path}...")

            code.datastore = WORKSPACE_LINKED_DATASTORE_NAME
            # NOTE: For flow directory upload, we prefer to upload it to the workspace linked datastore,
            # therefore we will directly use _check_and_upload_path, instead of v2 SDK public API
            # CodeOperations.create_or_update, as later one will upload the code asset to another
            # container in the storage account, which may fail with vnet for MT.
            # However, we might run into list secret permission error(especially in Heron workspace),
            # in this case, we will leverage v2 SDK public API, which has solution for Heron,
            # and request MT with the blob url;
            # refer to except block for more details.
            try:
                uploaded_code_asset, _ = _check_and_upload_path(
                    artifact=code,
                    asset_operations=ops._code_assets,
                    artifact_type="Code",
                    datastore_name=WORKSPACE_LINKED_DATASTORE_NAME,  # actually not work at all
                    show_progress=True,
                )
                path = uploaded_code_asset.path
                path = path[path.find("LocalUpload") :]  # path on container
                flow.code = path
                # azureml://datastores/workspaceblobstore/paths/<path-to-flow-dag-yaml>
                flow.path = SHORT_URI_FORMAT.format(
                    WORKSPACE_LINKED_DATASTORE_NAME, (Path(path) / flow.path).as_posix()
                )
            except HttpResponseError as e:
                # catch authorization error for list secret on datastore
                if "AuthorizationFailed" in str(e) and "datastores/listSecrets/action" in str(e):
                    uploaded_code_asset = ops._code_assets.create_or_update(code)
                    path = uploaded_code_asset.path
                    path = path.replace(".blob.core.windows.net:443/", ".blob.core.windows.net/")  # remove :443 port
                    flow.code = path
                    # https://<storage-account-name>.blob.core.windows.net/<container-name>/<path-to-flow-dag-yaml>
                    flow.path = f"{path}/{flow.path}"
                else:
                    raise
            flow._code_uploaded = True

    # region deprecated but keep for runtime test dependencies
    def _resolve_arm_id_or_upload_dependencies_to_file_share(self, flow: Flow) -> None:
        """Deprecated file-share variant of :meth:`_resolve_arm_id_or_upload_dependencies`."""
        ops = OperationOrchestrator(self._all_operations, self._operation_scope, self._operation_config)
        # resolve flow's code
        self._try_resolve_code_for_flow_to_file_share(flow=flow, ops=ops)

    @classmethod
    def _try_resolve_code_for_flow_to_file_share(cls, flow: Flow, ops: OperationOrchestrator) -> None:
        """Deprecated: upload the flow's code to the default file-share datastore."""
        from azure.ai.ml._utils._storage_utils import AzureMLDatastorePathUri

        from ._artifact_utilities import _check_and_upload_path

        if flow.path:
            if flow.path.startswith("azureml://datastores"):
                # remote path
                path_uri = AzureMLDatastorePathUri(flow.path)
                if path_uri.datastore != DEFAULT_STORAGE:
                    raise ValueError(f"Only {DEFAULT_STORAGE} is supported as remote storage for now.")
                flow.path = path_uri.path
                flow._code_uploaded = True
                return
        else:
            raise ValueError("Path is required for flow.")

        with flow._build_code() as code:
            if code is None:
                return
            if flow._code_uploaded:
                return
            code.datastore = DEFAULT_STORAGE
            uploaded_code_asset = _check_and_upload_path(
                artifact=code,
                asset_operations=ops._code_assets,
                artifact_type="Code",
                show_progress=False,
            )
            # NOTE(review): if the upload result contains neither "remote_path" nor
            # "remote path", `path` below is unbound and raises NameError — confirm
            # upload_artifact always sets one of these keys.
            if "remote_path" in uploaded_code_asset:
                path = uploaded_code_asset["remote_path"]
            elif "remote path" in uploaded_code_asset:
                path = uploaded_code_asset["remote path"]
            flow.code = path
            flow.path = (Path(path) / flow.path).as_posix()
            flow._code_uploaded = True

    # endregion
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/_connection_operations.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from typing import Dict

from azure.ai.ml._scope_dependent_operations import (
    OperationConfig,
    OperationsContainer,
    OperationScope,
    _ScopeDependentOperations,
)

from promptflow._sdk._utils import safe_parse_object_list
from promptflow._sdk.entities._connection import _Connection
from promptflow._utils.logger_utils import get_cli_sdk_logger
from promptflow.azure._entities._workspace_connection_spec import WorkspaceConnectionSpec
from promptflow.azure._restclient.flow_service_caller import FlowServiceCaller

logger = get_cli_sdk_logger()


class ConnectionOperations(_ScopeDependentOperations):
    """ConnectionOperations.

    You should not instantiate this class directly. Instead, you should
    create an PFClient instance that instantiates it for you and
    attaches it as an attribute.
    """

    def __init__(
        self,
        operation_scope: OperationScope,
        operation_config: OperationConfig,
        all_operations: OperationsContainer,
        credential,
        service_caller: FlowServiceCaller,
        **kwargs: Dict,
    ):
        super(ConnectionOperations, self).__init__(operation_scope, operation_config)
        self._all_operations = all_operations
        self._service_caller = service_caller
        self._credential = credential

    @property
    def _scope_kwargs(self) -> dict:
        """Workspace scope identifiers shared by every service-caller request."""
        return {
            "subscription_id": self._operation_scope.subscription_id,
            "resource_group_name": self._operation_scope.resource_group_name,
            "workspace_name": self._operation_scope.workspace_name,
        }

    def create_or_update(self, connection, **kwargs):
        """Create or update a workspace connection and return the resulting entity."""
        rest_payload = connection._to_rest_object()
        # create flow draft
        created = self._service_caller.create_connection(
            connection_name=connection.name,
            body=rest_payload,
            **self._scope_kwargs,
        )
        return _Connection._from_mt_rest_object(created)

    def get(self, name, **kwargs):
        """Fetch a single workspace connection by name."""
        fetched = self._service_caller.get_connection(
            connection_name=name,
            **self._scope_kwargs,
            **kwargs,
        )
        return _Connection._from_mt_rest_object(fetched)

    def delete(self, name, **kwargs):
        """Delete the named workspace connection; returns the service response."""
        return self._service_caller.delete_connection(
            connection_name=name,
            **self._scope_kwargs,
            **kwargs,
        )

    def list(self, **kwargs):
        """List workspace connections, skipping (with a log message) any that fail to parse."""
        raw_connections = self._service_caller.list_connections(
            **self._scope_kwargs,
            **kwargs,
        )
        return safe_parse_object_list(
            obj_list=raw_connections,
            parser=_Connection._from_mt_rest_object,
            message_generator=lambda x: f"Failed to load connection {x.connection_name}, skipped.",
        )

    def list_connection_specs(self, **kwargs):
        """List the connection specs supported by the workspace."""
        raw_specs = self._service_caller.list_connection_specs(
            **self._scope_kwargs,
            **kwargs,
        )
        return [WorkspaceConnectionSpec._from_rest_object(spec) for spec in raw_specs]
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/_artifact_utilities.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------

# pylint: disable=protected-access

import os
import uuid
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, Optional, TypeVar, Union

from azure.ai.ml._artifacts._blob_storage_helper import BlobStorageClient
from azure.ai.ml._artifacts._gen2_storage_helper import Gen2StorageClient
from azure.ai.ml._azure_environments import _get_storage_endpoint_from_metadata
from azure.ai.ml._restclient.v2022_10_01.models import DatastoreType
from azure.ai.ml._scope_dependent_operations import OperationScope
from azure.ai.ml._utils._arm_id_utils import (
    AMLNamedArmId,
    get_resource_name_from_arm_id,
    is_ARM_id_for_resource,
    remove_aml_prefix,
)
from azure.ai.ml._utils._asset_utils import (
    IgnoreFile,
    _build_metadata_dict,
    _validate_path,
    get_ignore_file,
    get_object_hash,
)
from azure.ai.ml._utils._storage_utils import (
    AzureMLDatastorePathUri,
    get_artifact_path_from_storage_url,
    get_storage_client,
)
from azure.ai.ml.constants._common import SHORT_URI_FORMAT, STORAGE_ACCOUNT_URLS
from azure.ai.ml.entities import Environment
from azure.ai.ml.entities._assets._artifacts.artifact import Artifact, ArtifactStorageInfo
from azure.ai.ml.entities._credentials import AccountKeyConfiguration
from azure.ai.ml.entities._datastore._constants import WORKSPACE_BLOB_STORE
from azure.ai.ml.exceptions import ErrorTarget, ValidationException
from azure.ai.ml.operations._datastore_operations import DatastoreOperations
from azure.storage.blob import BlobSasPermissions, generate_blob_sas
from azure.storage.filedatalake import FileSasPermissions, generate_file_sas

from ..._utils.logger_utils import LoggerFactory
from ._fileshare_storeage_helper import FlowFileStorageClient

module_logger = LoggerFactory.get_logger(__name__)


def _get_datastore_name(*, datastore_name: Optional[str] = WORKSPACE_BLOB_STORE) -> str:
    """Normalize a datastore reference (short name or ARM id) to a plain datastore name.

    Falls back to the workspace blob store when no name is given.
    """
    datastore_name = WORKSPACE_BLOB_STORE if not datastore_name else datastore_name
    try:
        datastore_name = get_resource_name_from_arm_id(datastore_name)
    except (ValueError, AttributeError, ValidationException):
        # not a full ARM id — strip any "azureml:" prefix and retry the ARM parse below
        module_logger.debug("datastore_name %s is not a full arm id. Proceed with a shortened name.\n", datastore_name)
        datastore_name = remove_aml_prefix(datastore_name)
    if is_ARM_id_for_resource(datastore_name):
        datastore_name = get_resource_name_from_arm_id(datastore_name)
    return datastore_name


def get_datastore_info(operations: DatastoreOperations, name: str) -> Dict[str, str]:
    """Get datastore account, type, and auth information."""
    datastore_info = {}
    if name:
        datastore = operations.get(name, include_secrets=True)
    else:
        datastore = operations.get_default(include_secrets=True)
    storage_endpoint = _get_storage_endpoint_from_metadata()
    credentials = datastore.credentials
    datastore_info["storage_type"] = datastore.type
    datastore_info["storage_account"] = datastore.account_name
    datastore_info["account_url"] = STORAGE_ACCOUNT_URLS[datastore.type].format(
        datastore.account_name, storage_endpoint
    )
    if isinstance(credentials, AccountKeyConfiguration):
        datastore_info["credential"] = credentials.account_key
    else:
        try:
            datastore_info["credential"] = credentials.sas_token
        except Exception as e:  # pylint: disable=broad-except
            # no sas_token attribute at all -> fall back to the operation's AAD credential;
            # any other failure while reading it is re-raised
            if not hasattr(credentials, "sas_token"):
                datastore_info["credential"] = operations._credential
            else:
                raise e

    if datastore.type == DatastoreType.AZURE_BLOB:
        datastore_info["container_name"] = str(datastore.container_name)
    elif datastore.type == DatastoreType.AZURE_DATA_LAKE_GEN2:
        datastore_info["container_name"] = str(datastore.filesystem)
    elif datastore.type == DatastoreType.AZURE_FILE:
        datastore_info["container_name"] = str(datastore.file_share_name)
    else:
        raise Exception(
            f"Datastore type {datastore.type} is not supported for uploads. "
            f"Supported types are {DatastoreType.AZURE_BLOB} and {DatastoreType.AZURE_DATA_LAKE_GEN2}."
        )
    return datastore_info


def list_logs_in_datastore(ds_info: Dict[str, str], prefix: str, legacy_log_folder_name: str) -> Dict[str, str]:
    """Returns a dictionary of file name to blob or data lake uri with SAS token, matching the structure of
    RunDetails.logFiles.

    legacy_log_folder_name: the name of the folder in the datastore that contains the logs
        /azureml-logs/*.txt is the legacy log structure for commandJob and sweepJob
        /logs/azureml/*.txt is the legacy log structure for pipeline parent Job
    """
    if ds_info["storage_type"] not in [
        DatastoreType.AZURE_BLOB,
        DatastoreType.AZURE_DATA_LAKE_GEN2,
    ]:
        raise Exception("Only Blob and Azure DataLake Storage Gen2 datastores are supported.")

    storage_client = get_storage_client(
        credential=ds_info["credential"],
        container_name=ds_info["container_name"],
        storage_account=ds_info["storage_account"],
        storage_type=ds_info["storage_type"],
    )

    items = storage_client.list(starts_with=prefix + "/user_logs/")
    # Append legacy log files if present
    items.extend(storage_client.list(starts_with=prefix + legacy_log_folder_name))

    log_dict = {}
    for item_name in items:
        sub_name = item_name.split(prefix + "/")[1]
        if isinstance(storage_client, BlobStorageClient):
            token = generate_blob_sas(
                account_name=ds_info["storage_account"],
                container_name=ds_info["container_name"],
                blob_name=item_name,
                account_key=ds_info["credential"],
                permission=BlobSasPermissions(read=True),
                expiry=datetime.utcnow() + timedelta(minutes=30),
            )
        elif isinstance(storage_client, Gen2StorageClient):
            token = generate_file_sas(  # pylint: disable=no-value-for-parameter
                account_name=ds_info["storage_account"],
                file_system_name=ds_info["container_name"],
                file_name=item_name,
                credential=ds_info["credential"],
                permission=FileSasPermissions(read=True),
                expiry=datetime.utcnow() + timedelta(minutes=30),
            )

        log_dict[sub_name] = "{}/{}/{}?{}".format(ds_info["account_url"], ds_info["container_name"], item_name, token)
    return log_dict


def _get_default_datastore_info(datastore_operation):
    """Return :func:`get_datastore_info` for the workspace default datastore."""
    return get_datastore_info(datastore_operation, None)


def upload_artifact(
    local_path: str,
    datastore_operation: DatastoreOperations,
    operation_scope: OperationScope,
    datastore_name: Optional[str],
    asset_hash: Optional[str] = None,
    show_progress: bool = True,
    asset_name: Optional[str] = None,
    asset_version: Optional[str] = None,
    ignore_file: IgnoreFile = IgnoreFile(None),
    sas_uri=None,
) -> ArtifactStorageInfo:
    """Upload local file or directory to datastore."""
    if sas_uri:
        storage_client = get_storage_client(credential=None, storage_account=None, account_url=sas_uri)
    else:
        datastore_name = _get_datastore_name(datastore_name=datastore_name)
        datastore_info = get_datastore_info(datastore_operation, datastore_name)
        # promptflow-specific client: uploads flow folders to the workspace file share
        storage_client = FlowFileStorageClient(
            credential=datastore_info["credential"],
            file_share_name=datastore_info["container_name"],
            account_url=datastore_info["account_url"],
            azure_cred=datastore_operation._credential,
        )

    artifact_info = storage_client.upload(
        local_path,
        asset_hash=asset_hash,
        show_progress=show_progress,
        name=asset_name,
        version=asset_version,
        ignore_file=ignore_file,
    )

    # prepend the client's directory so "remote path" is relative to the share root
    artifact_info["remote path"] = os.path.join(
        storage_client.directory_client.directory_path, artifact_info["remote path"]
    )

    return artifact_info


def download_artifact(
    starts_with: Union[str, os.PathLike],
    destination: str,
    datastore_operation: DatastoreOperations,
    datastore_name: Optional[str],
    datastore_info: Optional[Dict] = None,
) -> str:
    """Download datastore path to local file or directory.

    :param Union[str, os.PathLike] starts_with: Prefix of blobs to download
    :param str destination: Path that files will be written to
    :param DatastoreOperations datastore_operation: Datastore operations
    :param Optional[str] datastore_name: name of datastore
    :param Dict datastore_info: the return value of invoking get_datastore_info
    :return str: Path that files were written to
    """
    starts_with = starts_with.as_posix() if isinstance(starts_with, Path) else starts_with
    datastore_name = _get_datastore_name(datastore_name=datastore_name)
    if datastore_info is None:
        datastore_info = get_datastore_info(datastore_operation, datastore_name)
    storage_client = get_storage_client(**datastore_info)
    storage_client.download(starts_with=starts_with, destination=destination)
    return destination


def download_artifact_from_storage_url(
    blob_url: str,
    destination: str,
    datastore_operation: DatastoreOperations,
    datastore_name: Optional[str],
) -> str:
    """Download datastore blob URL to local file or directory."""
    datastore_name = _get_datastore_name(datastore_name=datastore_name)
    datastore_info = get_datastore_info(datastore_operation, datastore_name)
    starts_with = get_artifact_path_from_storage_url(
        blob_url=str(blob_url), container_name=datastore_info.get("container_name")
    )
    return download_artifact(
        starts_with=starts_with,
        destination=destination,
        datastore_operation=datastore_operation,
        datastore_name=datastore_name,
        datastore_info=datastore_info,
    )


def download_artifact_from_aml_uri(uri: str, destination: str, datastore_operation: DatastoreOperations):
    """Downloads artifact pointed to by URI of the form `azureml://...` to destination.

    :param str uri: AzureML uri of artifact to download
    :param str destination: Path to download artifact to
    :param DatastoreOperations datastore_operation: datastore operations
    :return str: Path that files were downloaded to
    """
    parsed_uri = AzureMLDatastorePathUri(uri)
    return download_artifact(
        starts_with=parsed_uri.path,
        destination=destination,
        datastore_operation=datastore_operation,
        datastore_name=parsed_uri.datastore,
    )


def aml_datastore_path_exists(
    uri: str, datastore_operation: DatastoreOperations, datastore_info: Optional[dict] = None
):
    """Checks whether `uri` of the form "azureml://" points to either a directory or a file.

    :param str uri: azure ml datastore uri
    :param DatastoreOperations datastore_operation: Datastore operation
    :param dict datastore_info: return value of get_datastore_info
    """
    parsed_uri = AzureMLDatastorePathUri(uri)
    datastore_info = datastore_info or get_datastore_info(datastore_operation, parsed_uri.datastore)
    return get_storage_client(**datastore_info).exists(parsed_uri.path)


def _upload_to_datastore(
    operation_scope: OperationScope,
    datastore_operation: DatastoreOperations,
    path: Union[str, Path, os.PathLike],
    artifact_type: str,
    datastore_name: Optional[str] = None,
    show_progress: bool = True,
    asset_name: Optional[str] = None,
    asset_version: Optional[str] = None,
    asset_hash: Optional[str] = None,
    ignore_file: Optional[IgnoreFile] = None,
    sas_uri: Optional[str] = None,  # contains registry sas url
) -> ArtifactStorageInfo:
    """Validate ``path``, compute its ignore-aware hash, and upload it via :func:`upload_artifact`."""
    _validate_path(path, _type=artifact_type)
    if not ignore_file:
        ignore_file = get_ignore_file(path)
    if not asset_hash:
        asset_hash = get_object_hash(path, ignore_file)
    artifact = upload_artifact(
        str(path),
        datastore_operation,
        operation_scope,
        datastore_name,
        show_progress=show_progress,
        asset_hash=asset_hash,
        asset_name=asset_name,
        asset_version=asset_version,
        ignore_file=ignore_file,
        sas_uri=sas_uri,
    )
    return artifact


def _upload_and_generate_remote_uri(
    operation_scope: OperationScope,
    datastore_operation: DatastoreOperations,
    path: Union[str, Path, os.PathLike],
    artifact_type: str = ErrorTarget.ARTIFACT,
    datastore_name: str = WORKSPACE_BLOB_STORE,
    show_progress: bool = True,
) -> str:
    """Upload ``path`` and return a short-form ``azureml://datastores/...`` uri for it."""
    # Asset name is required for uploading to a datastore
    asset_name = str(uuid.uuid4())
    artifact_info = _upload_to_datastore(
        operation_scope=operation_scope,
        datastore_operation=datastore_operation,
        path=path,
        datastore_name=datastore_name,
        asset_name=asset_name,
        artifact_type=artifact_type,
        show_progress=show_progress,
    )

    path = artifact_info.relative_path
    datastore = AMLNamedArmId(artifact_info.datastore_arm_id).asset_name
    return SHORT_URI_FORMAT.format(datastore, path)


def _update_metadata(name, version, indicator_file, datastore_info) -> None:
    """Stamp name/version metadata on the uploaded artifact's indicator file/blob."""
    storage_client = get_storage_client(**datastore_info)

    if isinstance(storage_client, BlobStorageClient):
        _update_blob_metadata(name, version, indicator_file, storage_client)
    elif isinstance(storage_client, Gen2StorageClient):
        _update_gen2_metadata(name, version, indicator_file, storage_client)


def _update_blob_metadata(name, version, indicator_file, storage_client) -> None:
    """Blob-storage variant of :func:`_update_metadata`."""
    container_client = storage_client.container_client
    # indicator_file may include the container prefix; strip it before the blob lookup
    if indicator_file.startswith(storage_client.container):
        indicator_file = indicator_file.split(storage_client.container)[1]
    blob = container_client.get_blob_client(blob=indicator_file)
    blob.set_blob_metadata(_build_metadata_dict(name=name, version=version))


def _update_gen2_metadata(name, version, indicator_file, storage_client) -> None:
    """Data Lake Gen2 variant of :func:`_update_metadata`."""
    artifact_directory_client = storage_client.file_system_client.get_directory_client(indicator_file)
    artifact_directory_client.set_metadata(_build_metadata_dict(name=name, version=version))


T = TypeVar("T", bound=Artifact)


def _check_and_upload_path(
    artifact: T,
    asset_operations: Union["DataOperations", "ModelOperations", "CodeOperations", "FeatureSetOperations"],
    artifact_type: str,
    datastore_name: Optional[str] = None,
    sas_uri: Optional[str] = None,
    show_progress: bool = True,
):
    """Checks whether `artifact` is a path or a uri and uploads it to the datastore if necessary.

    param T artifact: artifact to check and upload param
        Union["DataOperations", "ModelOperations", "CodeOperations"]
        asset_operations: the asset operations to use for uploading
    param str datastore_name: the name of the datastore to upload to
    param str sas_uri: the sas uri to use for uploading
    """
    from azure.ai.ml._utils.utils import is_mlflow_uri, is_url

    # NOTE(review): the datastore_name parameter is immediately overridden by the
    # artifact's own datastore attribute — callers control the target via
    # artifact.datastore, not via this parameter.
    datastore_name = artifact.datastore
    if (
        hasattr(artifact, "local_path")
        and artifact.local_path is not None
        or (
            hasattr(artifact, "path")
            and artifact.path is not None
            and not (is_url(artifact.path) or is_mlflow_uri(artifact.path))
        )
    ):
        path = (
            Path(artifact.path)
            if hasattr(artifact, "path") and artifact.path is not None
            else Path(artifact.local_path)
        )
        if not path.is_absolute():
            path = Path(artifact.base_path, path).resolve()
        uploaded_artifact = _upload_to_datastore(
            asset_operations._operation_scope,
            asset_operations._datastore_operation,
            path,
            datastore_name=datastore_name,
            asset_name=artifact.name,
            asset_version=str(artifact.version),
            asset_hash=artifact._upload_hash if hasattr(artifact, "_upload_hash") else None,
            sas_uri=sas_uri,
            artifact_type=artifact_type,
            show_progress=show_progress,
            ignore_file=getattr(artifact, "_ignore_file", None),
        )
    return uploaded_artifact


def _check_and_upload_env_build_context(
    environment: Environment,
    operations: "EnvironmentOperations",
    sas_uri=None,
    show_progress: bool = True,
) -> Environment:
    """Upload an environment's build context (if any) and point the build path at the result."""
    if environment.path:
        uploaded_artifact = _upload_to_datastore(
            operations._operation_scope,
            operations._datastore_operation,
            environment.path,
            asset_name=environment.name,
            asset_version=str(environment.version),
            asset_hash=environment._upload_hash,
            sas_uri=sas_uri,
            artifact_type=ErrorTarget.ENVIRONMENT,
            datastore_name=environment.datastore,
            show_progress=show_progress,
        )
        # TODO: Depending on decision trailing "/" needs to stay or not. EMS requires it to be present
        environment.build.path = uploaded_artifact.full_storage_path + "/"
    return environment
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/_run_operations.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import asyncio
import concurrent
import copy
import hashlib
import json
import os
import shutil
import sys
import time
from concurrent.futures import ThreadPoolExecutor
from functools import cached_property
from pathlib import Path
from typing import Any, Dict, List, Optional, Union

import requests
from azure.ai.ml._artifacts._artifact_utilities import _upload_and_generate_remote_uri
from azure.ai.ml._scope_dependent_operations import (
    OperationConfig,
    OperationsContainer,
    OperationScope,
    _ScopeDependentOperations,
)
from azure.ai.ml.constants._common import AssetTypes, AzureMLResourceType
from azure.ai.ml.entities import Workspace
from azure.ai.ml.operations import DataOperations
from azure.ai.ml.operations._operation_orchestrator import OperationOrchestrator

from promptflow._constants import LANGUAGE_KEY, FlowLanguage
from promptflow._sdk._constants import (
    LINE_NUMBER,
    MAX_RUN_LIST_RESULTS,
    MAX_SHOW_DETAILS_RESULTS,
    PROMPT_FLOW_DIR_NAME,
    PROMPT_FLOW_RUNS_DIR_NAME,
    REGISTRY_URI_PREFIX,
    VIS_PORTAL_URL_TMPL,
    AzureRunTypes,
    ListViewType,
    RunDataKeys,
    RunHistoryKeys,
    RunStatus,
)
from promptflow._sdk._errors import InvalidRunStatusError, RunNotFoundError, RunOperationParameterError
from promptflow._sdk._telemetry import ActivityType, WorkspaceTelemetryMixin, monitor_operation
from promptflow._sdk._utils import in_jupyter_notebook, incremental_print, is_remote_uri, print_red_error
from promptflow._sdk.entities import Run
from promptflow._utils.async_utils import async_run_allowing_running_loop
from promptflow._utils.flow_utils import get_flow_lineage_id
from promptflow._utils.logger_utils import get_cli_sdk_logger
from promptflow.azure._constants._flow import AUTOMATIC_RUNTIME, AUTOMATIC_RUNTIME_NAME, CLOUD_RUNS_PAGE_SIZE
from promptflow.azure._load_functions import load_flow
from promptflow.azure._restclient.flow_service_caller import FlowServiceCaller
from promptflow.azure._utils.gerneral import get_authorization, get_user_alias_from_credential
from promptflow.azure.operations._flow_operations import FlowOperations
from promptflow.exceptions import UserErrorException

RUNNING_STATUSES = RunStatus.get_running_statuses()

logger = get_cli_sdk_logger()


class RunRequestException(Exception):
    """RunRequestException."""

    def __init__(self, message):
        super().__init__(message)


class RunOperations(WorkspaceTelemetryMixin, _ScopeDependentOperations):
    """RunOperations that can manage runs.

    You should not instantiate this class directly. Instead, you should
    create an :class:`~promptflow.azure.PFClient` instance and this operation is available as the instance's attribute.
    """

    def __init__(
        self,
        operation_scope: OperationScope,
        operation_config: OperationConfig,
        all_operations: OperationsContainer,
        flow_operations: FlowOperations,
        credential,
        service_caller: FlowServiceCaller,
        workspace: Workspace,
        **kwargs: Dict,
    ):
        super().__init__(
            operation_scope=operation_scope,
            operation_config=operation_config,
            workspace_name=operation_scope.workspace_name,
            subscription_id=operation_scope.subscription_id,
            resource_group_name=operation_scope.resource_group_name,
        )
        self._operation_scope = operation_scope
        self._all_operations = all_operations
        self._service_caller = service_caller
        self._workspace = workspace
        self._credential = credential
        self._flow_operations = flow_operations
        self._orchestrators = OperationOrchestrator(self._all_operations, self._operation_scope, self._operation_config)
        # fetched eagerly so later upload operations don't need a round trip
        self._workspace_default_datastore = self._datastore_operations.get_default()

    @property
    def _data_operations(self):
        # AzureML data asset operations, resolved lazily from the operations container
        return self._all_operations.get_operation(AzureMLResourceType.DATA, lambda x: isinstance(x, DataOperations))

    @property
    def _datastore_operations(self) -> "DatastoreOperations":
        return self._all_operations.all_operations[AzureMLResourceType.DATASTORE]

    @cached_property
    def _run_history_endpoint_url(self):
        """Get the endpoint url for the workspace."""
        endpoint = self._service_caller._service_endpoint
        return endpoint + "history/v1.0" + self._service_caller._common_azure_url_pattern

    def _get_run_portal_url(self, run_id: str):
        """Get the portal url for the run."""
        portal_url, run_info = None, None
        # best effort: a failure to reach PFS only logs a warning and returns None
        try:
            run_info = self._get_run_from_pfs(run_id=run_id)
        except Exception as e:
            logger.warning(f"Failed to get run portal url from pfs for run {run_id!r}: {str(e)}")

        if run_info and hasattr(run_info, "studio_portal_endpoint"):
            portal_url = run_info.studio_portal_endpoint

        return portal_url

    def _get_headers(self):
        """Build the auth + content-type headers for direct REST calls."""
        custom_header = {
            "Authorization": get_authorization(credential=self._credential),
            "Content-Type": "application/json",
        }
        return custom_header

    @monitor_operation(activity_name="pfazure.runs.create_or_update", activity_type=ActivityType.PUBLICAPI)
    def create_or_update(self, run: Run, **kwargs) -> Run:
        """Create or update a run.

        :param run: Run object to create or update.
        :type run: ~promptflow.entities.Run
        :return: Run object created or updated.
        :rtype: ~promptflow.entities.Run
        """
        stream = kwargs.pop("stream", False)
        reset = kwargs.pop("reset_runtime", False)

        # validate the run object
        run._validate_for_run_create_operation()

        rest_obj = self._resolve_dependencies_in_parallel(run=run, runtime=kwargs.get("runtime"), reset=reset)

        self._service_caller.submit_bulk_run(
            subscription_id=self._operation_scope.subscription_id,
            resource_group_name=self._operation_scope.resource_group_name,
            workspace_name=self._operation_scope.workspace_name,
            body=rest_obj,
        )
        if in_jupyter_notebook():
            print(f"Portal url: {self._get_run_portal_url(run_id=run.name)}")
        if stream:
            self.stream(run=run.name)
        return self.get(run=run.name)

    @monitor_operation(activity_name="pfazure.runs.list", activity_type=ActivityType.PUBLICAPI)
    def list(
        self, max_results: int = MAX_RUN_LIST_RESULTS, list_view_type: ListViewType = ListViewType.ACTIVE_ONLY, **kwargs
    ) -> List[Run]:
        """List runs in the workspace.

        :param max_results: The max number of runs to return, defaults to 50, max is 100
        :type max_results: int
        :param list_view_type: The list view type, defaults to ListViewType.ACTIVE_ONLY
        :type list_view_type: ListViewType
        :return: The list of runs.
        :rtype: List[~promptflow.entities.Run]
        """
        # NOTE(review): the message says "positive integer" but the check accepts 0 —
        # confirm whether max_results == 0 is intentionally allowed.
        if not isinstance(max_results, int) or max_results < 0:
            raise RunOperationParameterError(f"'max_results' must be a positive integer, got {max_results!r}")

        headers = self._get_headers()
        filter_archived = []
        if list_view_type == ListViewType.ACTIVE_ONLY:
            filter_archived = ["false"]
        elif list_view_type == ListViewType.ARCHIVED_ONLY:
            filter_archived = ["true"]
        elif list_view_type == ListViewType.ALL:
            filter_archived = ["true", "false"]
        else:
            raise RunOperationParameterError(
                f"Invalid list view type: {list_view_type!r}, expecting one of ['ActiveOnly', 'ArchivedOnly', 'All']"
            )

        pay_load = {
            "filters": [
                {"field": "type", "operator": "eq", "values": ["runs"]},
                {"field": "annotations/archived", "operator": "eq", "values": filter_archived},
                {
                    "field": "properties/runType",
                    "operator": "contains",
                    "values": [
                        AzureRunTypes.BATCH,
                        AzureRunTypes.EVALUATION,
                        AzureRunTypes.PAIRWISE_EVALUATE,
                    ],
                },
            ],
            "freeTextSearch": "",
            "order": [{"direction": "Desc", "field": "properties/creationContext/createdTime"}],
            # index service can return 100 results at most
            "pageSize": min(max_results, 100),
            "skip": 0,
            "includeTotalResultCount": True,
            "searchBuilder": "AppendPrefix",
        }

        endpoint = self._run_history_endpoint_url.replace("/history", "/index")
        url = endpoint + "/entities"
        response = requests.post(url, headers=headers, json=pay_load)
        if response.status_code == 200:
            entities = json.loads(response.text)
            runs = entities["value"]
        else:
            raise RunRequestException(
                f"Failed to get runs from service. Code: {response.status_code}, text: {response.text}"
            )
        refined_runs = []
        for run in runs:
            refined_runs.append(Run._from_index_service_entity(run))
        return refined_runs

    @monitor_operation(activity_name="pfazure.runs.get_metrics", activity_type=ActivityType.PUBLICAPI)
    def get_metrics(self, run: Union[str, Run], **kwargs) -> dict:
        """Get the metrics from the run.

        :param run: The run or the run object
        :type run: Union[str, ~promptflow.entities.Run]
        :return: The metrics
        :rtype: dict
        """
        run = Run._validate_and_return_run_name(run)
        self._check_cloud_run_completed(run_name=run)
        metrics = self._get_metrics_from_metric_service(run)
        return metrics

    @monitor_operation(activity_name="pfazure.runs.get_details", activity_type=ActivityType.PUBLICAPI)
    def get_details(
        self, run: Union[str, Run], max_results: int = MAX_SHOW_DETAILS_RESULTS, all_results: bool = False, **kwargs
    ) -> "DataFrame":
        """Get the details from the run.

        .. note::

            If `all_results` is set to True, `max_results` will be overwritten to sys.maxsize.

        :param run: The run name or run object
        :type run: Union[str, ~promptflow.sdk.entities.Run]
        :param max_results: The max number of runs to return, defaults to 100
        :type max_results: int
        :param all_results: Whether to return all results, defaults to False
        :type all_results: bool
        :raises RunOperationParameterError: If `max_results` is not a positive integer.
        :return: The details data frame.
        :rtype: pandas.DataFrame
        """
        from pandas import DataFrame

        # if all_results is True, set max_results to sys.maxsize
        if all_results:
            max_results = sys.maxsize

        if not isinstance(max_results, int) or max_results < 1:
            raise RunOperationParameterError(f"'max_results' must be a positive integer, got {max_results!r}")

        run = Run._validate_and_return_run_name(run)
        self._check_cloud_run_completed(run_name=run)
        child_runs = self._get_flow_runs_pagination(run, max_results=max_results)
        inputs, outputs = self._get_inputs_outputs_from_child_runs(child_runs)

        # if there is any line run failed, the number of inputs and outputs will be different
        # this will result in pandas raising ValueError, so we need to handle mismatched case
        # if all line runs are failed, no need to fill the outputs
        if len(outputs) > 0:
            # get total number of line runs from inputs
            num_line_runs = len(list(inputs.values())[0])
            num_outputs = len(list(outputs.values())[0])
            if num_line_runs > num_outputs:
                # build full set with None as placeholder
                filled_outputs = {}
                output_keys = list(outputs.keys())
                for k in output_keys:
                    filled_outputs[k] = [None] * num_line_runs
                filled_outputs[LINE_NUMBER] = list(range(num_line_runs))
                for i in range(num_outputs):
                    line_number = outputs[LINE_NUMBER][i]
                    for k in output_keys:
                        filled_outputs[k][line_number] = outputs[k][i]
                # replace defective outputs with full set
                outputs = copy.deepcopy(filled_outputs)

        data = {}
        columns = []
        for k in inputs:
            new_k = f"inputs.{k}"
            data[new_k] = copy.deepcopy(inputs[k])
            columns.append(new_k)
        for k in outputs:
            new_k = f"outputs.{k}"
            data[new_k] = copy.deepcopy(outputs[k])
            columns.append(new_k)
        df = DataFrame(data).reindex(columns=columns)
        if f"outputs.{LINE_NUMBER}" in columns:
            df = df.set_index(f"outputs.{LINE_NUMBER}")
        return df

    def _check_cloud_run_completed(self, run_name: str) -> bool:
        """Check if the cloud run is completed."""
        run = self.get(run=run_name)
        run._check_run_status_is_completed()

    def _get_flow_runs_pagination(self, name: str, max_results: int) -> List[dict]:
        """Fetch child runs page by page up to ``max_results`` entries."""
        # call childRuns API with pagination to avoid PFS OOM
        # different from UX, run status should be completed here
        flow_runs = []
        start_index, end_index = 0, CLOUD_RUNS_PAGE_SIZE - 1
        while start_index < max_results:
            current_flow_runs = self._service_caller.get_child_runs(
                subscription_id=self._operation_scope.subscription_id,
                resource_group_name=self._operation_scope.resource_group_name,
                workspace_name=self._operation_scope.workspace_name,
                flow_run_id=name,
                start_index=start_index,
                end_index=end_index,
            )
            # no data in current page
            if len(current_flow_runs) == 0:
                break
            start_index, end_index = start_index + CLOUD_RUNS_PAGE_SIZE, end_index + CLOUD_RUNS_PAGE_SIZE
            flow_runs += current_flow_runs
        return flow_runs[0:max_results]

    def _extract_metrics_from_metric_service_response(self, values) -> dict:
        """Get metrics from the metric service response."""
        refined_metrics = {}
        metric_list = values.get("value", [])
        if not metric_list:
            return refined_metrics

        for metric in metric_list:
            metric_name = metric["name"]
            # skip internal system metrics (see _is_system_metric)
            if self._is_system_metric(metric_name):
                continue
            refined_metrics[metric_name] = metric["value"][0]["data"][metric_name]
        return refined_metrics

    def _get_metrics_from_metric_service(self, run_id) -> dict:
        """Get the metrics from metric service."""
        headers = self._get_headers()
        # refer to MetricController: https://msdata.visualstudio.com/Vienna/_git/vienna?path=/src/azureml-api/src/Metric/EntryPoints/Api/Controllers/MetricController.cs&version=GBmaster  # noqa: E501
        endpoint = self._run_history_endpoint_url.replace("/history/v1.0", "/metric/v2.0")
        url = endpoint + f"/runs/{run_id}/lastvalues"
        response = requests.post(url, headers=headers, json={})
        if response.status_code == 200:
            values = response.json()
            return self._extract_metrics_from_metric_service_response(values)
        else:
            raise RunRequestException(
                f"Failed to get metrics from service. Code: {response.status_code}, text: {response.text}"
            )

    @staticmethod
    def _is_system_metric(metric: str) -> bool:
        """Check if the metric is system metric.

        Current we have some system metrics like: __pf__.lines.completed, __pf__.lines.bypassed,
        __pf__.lines.failed, __pf__.nodes.xx.completed
        """
        return (
            metric.endswith(".completed")
            or metric.endswith(".bypassed")
            or metric.endswith(".failed")
            or metric.endswith(".is_completed")
        )

    @monitor_operation(activity_name="pfazure.runs.get", activity_type=ActivityType.PUBLICAPI)
    def get(self, run: Union[str, Run], **kwargs) -> Run:
        """Get a run.

        :param run: The run name
        :type run: Union[str, ~promptflow.entities.Run]
        :return: The run object
        :rtype: ~promptflow.entities.Run
        """
        run = Run._validate_and_return_run_name(run)
        return self._get_run_from_run_history(flow_run_id=run, **kwargs)

    def _get_run_from_run_history(self, flow_run_id, original_form=False, **kwargs):
        """Get run info from run history"""
        headers = self._get_headers()
        url = self._run_history_endpoint_url + "/rundata"

        payload = {
            "runId": flow_run_id,
            "selectRunMetadata": True,
            "selectRunDefinition": True,
            "selectJobSpecification": True,
        }

        response = requests.post(url, headers=headers, json=payload)

        if response.status_code == 200:
            run = response.json()
            # if original_form is True, return the original run data from run history, mainly for test use
            if original_form:
                return run
            run_data = self._refine_run_data_from_run_history(run)
            run = Run._from_run_history_entity(run_data)
            return run
        elif response.status_code == 404:
            raise RunNotFoundError(f"Run {flow_run_id!r} not found.")
        else:
            raise RunRequestException(
                f"Failed to get run from service. Code: {response.status_code}, text: {response.text}"
            )

    def _refine_run_data_from_run_history(self, run_data: dict) -> dict:
        """Refine the run data from run history.

        Generate the portal url, input and output value from run history data.
""" run_data = run_data[RunHistoryKeys.RunMetaData] # add cloud run url run_data[RunDataKeys.PORTAL_URL] = self._get_run_portal_url(run_id=run_data["runId"]) # get input and output value # TODO: Unify below values to the same pattern - azureml://xx properties = run_data["properties"] input_data = properties.pop("azureml.promptflow.input_data", None) input_run_id = properties.pop("azureml.promptflow.input_run_id", None) output_data = run_data["outputs"] if output_data: output_data = output_data.get("flow_outputs", {}).get("assetId", None) run_data[RunDataKeys.DATA] = input_data run_data[RunDataKeys.RUN] = input_run_id run_data[RunDataKeys.OUTPUT] = output_data return run_data def _get_run_from_index_service(self, flow_run_id, **kwargs): """Get run info from index service""" headers = self._get_headers() payload = { "filters": [ {"field": "type", "operator": "eq", "values": ["runs"]}, {"field": "annotations/archived", "operator": "eq", "values": ["false"]}, {"field": "properties/runId", "operator": "eq", "values": [flow_run_id]}, ], "order": [{"direction": "Desc", "field": "properties/startTime"}], "pageSize": 50, } endpoint = self._run_history_endpoint_url.replace("/history", "/index") url = endpoint + "/entities" response = requests.post(url, json=payload, headers=headers) if response.status_code == 200: runs = response.json().get("value", None) if not runs: raise RunRequestException( f"Could not found run with run id {flow_run_id!r}, please double check the run id and try again." ) run = runs[0] return Run._from_index_service_entity(run) else: raise RunRequestException( f"Failed to get run metrics from service. 
Code: {response.status_code}, text: {response.text}" ) def _get_run_from_pfs(self, run_id, **kwargs): """Get run info from pfs""" return self._service_caller.get_flow_run( subscription_id=self._operation_scope.subscription_id, resource_group_name=self._operation_scope.resource_group_name, workspace_name=self._operation_scope.workspace_name, flow_run_id=run_id, ) @monitor_operation(activity_name="pfazure.runs.archive", activity_type=ActivityType.PUBLICAPI) def archive(self, run: Union[str, Run]) -> Run: """Archive a run. :param run: The run name or run object :type run: Union[str, ~promptflow.entities.Run] :return: The run object :rtype: ~promptflow.entities.Run """ run = Run._validate_and_return_run_name(run) payload = { RunHistoryKeys.HIDDEN: True, } return self._modify_run_in_run_history(run_id=run, payload=payload) @monitor_operation(activity_name="pfazure.runs.restore", activity_type=ActivityType.PUBLICAPI) def restore(self, run: Union[str, Run]) -> Run: """Restore a run. :param run: The run name or run object :type run: Union[str, ~promptflow.entities.Run] :return: The run object :rtype: ~promptflow.entities.Run """ run = Run._validate_and_return_run_name(run) payload = { RunHistoryKeys.HIDDEN: False, } return self._modify_run_in_run_history(run_id=run, payload=payload) def _get_log(self, flow_run_id: str) -> str: return self._service_caller.caller.bulk_runs.get_flow_run_log_content( subscription_id=self._operation_scope.subscription_id, resource_group_name=self._operation_scope.resource_group_name, workspace_name=self._operation_scope.workspace_name, flow_run_id=flow_run_id, headers=self._get_headers(), ) @monitor_operation(activity_name="pfazure.runs.update", activity_type=ActivityType.PUBLICAPI) def update( self, run: Union[str, Run], display_name: Optional[str] = None, description: Optional[str] = None, tags: Optional[Dict[str, str]] = None, ) -> Optional[Run]: """Update a run. May update the display name, description or tags. .. 
note:: - Display name and description are strings, and tags is a dictionary of key-value pairs, both key and value are also strings. - Tags is a dictionary of key-value pairs. Updating tags will overwrite the existing key-value pair, but will not delete the existing key-value pairs. :param run: The run name or run object :type run: Union[str, ~promptflow.entities.Run] :param display_name: The display name :type display_name: Optional[str] :param description: The description :type description: Optional[str] :param tags: The tags :type tags: Optional[Dict[str, str]] :raises UpdateRunError: If nothing or wrong type values provided to update the run. :return: The run object :rtype: Optional[~promptflow.entities.Run] """ run = Run._validate_and_return_run_name(run) if display_name is None and description is None and tags is None: logger.warning("Nothing provided to update the run.") return None payload = {} if isinstance(display_name, str): payload["displayName"] = display_name elif display_name is not None: logger.warning(f"Display name must be a string, got {type(display_name)!r}: {display_name!r}.") if isinstance(description, str): payload["description"] = description elif description is not None: logger.warning(f"Description must be a string, got {type(description)!r}: {description!r}.") # check if the tags type is Dict[str, str] if isinstance(tags, dict) and all( isinstance(key, str) and isinstance(value, str) for key, value in tags.items() ): payload["tags"] = tags elif tags is not None: logger.warning(f"Tags type must be 'Dict[str, str]', got non-dict or non-string key/value in tags: {tags}.") return self._modify_run_in_run_history(run_id=run, payload=payload) @monitor_operation(activity_name="pfazure.runs.stream", activity_type=ActivityType.PUBLICAPI) def stream(self, run: Union[str, Run], raise_on_error: bool = True) -> Run: """Stream the logs of a run. 
:param run: The run name or run object :type run: Union[str, ~promptflow.entities.Run] :param raise_on_error: Raises an exception if a run fails or canceled. :type raise_on_error: bool :return: The run object :rtype: ~promptflow.entities.Run """ run = self.get(run=run) # TODO: maybe we need to make this configurable file_handler = sys.stdout # different from Azure ML job, flow job can run very fast, so it might not print anything; # use below variable to track this behavior, and at least print something to the user. try: printed = 0 stream_count = 0 start = time.time() while run.status in RUNNING_STATUSES or run.status == RunStatus.FINALIZING: file_handler.flush() stream_count += 1 # print prompt every 3 times, in case there is no log printed if stream_count % 3 == 0: # print prompt every 3 times file_handler.write(f"(Run status is {run.status!r}, continue streaming...)\n") # if the run is not started for 5 minutes, print an error message and break the loop if run.status == RunStatus.NOT_STARTED: current = time.time() if current - start > 300: file_handler.write( f"The run {run.name!r} is in status 'NotStarted' for 5 minutes, streaming is stopped." 
"Please make sure you are using the latest runtime.\n" ) break available_logs = self._get_log(flow_run_id=run.name) printed = incremental_print(available_logs, printed, file_handler) time.sleep(10) run = self.get(run=run.name) # ensure all logs are printed file_handler.flush() available_logs = self._get_log(flow_run_id=run.name) incremental_print(available_logs, printed, file_handler) file_handler.write("======= Run Summary =======\n") duration = None if run._start_time and run._end_time: duration = str(run._end_time - run._start_time) file_handler.write( f'Run name: "{run.name}"\n' f'Run status: "{run.status}"\n' f'Start time: "{run._start_time}"\n' f'Duration: "{duration}"\n' f'Run url: "{self._get_run_portal_url(run_id=run.name)}"' ) except KeyboardInterrupt: error_message = ( "The output streaming for the flow run was interrupted.\n" "But the run is still executing on the cloud.\n" ) print(error_message) if run.status == RunStatus.FAILED or run.status == RunStatus.CANCELED: if run.status == RunStatus.FAILED: try: error_message = run._error["error"]["message"] except Exception: # pylint: disable=broad-except error_message = "Run fails with unknown error." else: error_message = "Run is canceled." 
if raise_on_error: raise InvalidRunStatusError(error_message) else: print_red_error(error_message) return run def _resolve_data_to_asset_id(self, run: Run): # Skip if no data provided if run.data is None: return test_data = run.data def _get_data_type(_data): if os.path.isdir(_data): return AssetTypes.URI_FOLDER else: return AssetTypes.URI_FILE if is_remote_uri(test_data): # Pass through ARM id or remote url return test_data if os.path.exists(test_data): # absolute local path, upload, transform to remote url data_type = _get_data_type(test_data) test_data = _upload_and_generate_remote_uri( self._operation_scope, self._datastore_operations, test_data, datastore_name=self._workspace_default_datastore.name, show_progress=self._show_progress, ) if data_type == AssetTypes.URI_FOLDER and test_data and not test_data.endswith("/"): test_data = test_data + "/" else: raise ValueError( f"Local path {test_data!r} not exist. " "If it's remote data, only data with azureml prefix or remote url is supported." ) return test_data def _resolve_flow(self, run: Run): if run._use_remote_flow: return self._resolve_flow_definition_resource_id(run=run) flow = load_flow(run.flow) self._flow_operations._resolve_arm_id_or_upload_dependencies( flow=flow, # ignore .promptflow/dag.tools.json only for run submission scenario in python ignore_tools_json=flow._flow_dict.get(LANGUAGE_KEY, None) != FlowLanguage.CSharp, ) return flow.path def _get_session_id(self, flow): try: user_alias = get_user_alias_from_credential(self._credential) except Exception: # fall back to unknown user when failed to get credential. user_alias = "unknown_user" flow_id = get_flow_lineage_id(flow_dir=flow) session_id = f"{user_alias}_{flow_id}" # hash and truncate to avoid the session id getting too long # backend has a 64 bit limit for session id. 
# use hexdigest to avoid non-ascii characters in session id session_id = str(hashlib.sha256(session_id.encode()).hexdigest())[:48] return session_id def _get_inputs_outputs_from_child_runs(self, runs: List[Dict[str, Any]]): """Get the inputs and outputs from the child runs.""" inputs = {} outputs = {} outputs[LINE_NUMBER] = [] runs.sort(key=lambda x: x["index"]) # 1st loop, until have all outputs keys outputs_keys = [] for run in runs: run_outputs = run["output"] if isinstance(run_outputs, dict): for k in run_outputs: outputs_keys.append(k) break # 2nd complete loop, get values for run in runs: index, run_inputs, run_outputs = run["index"], run["inputs"], run["output"] # input should always available as a dict for k, v in run_inputs.items(): if k not in inputs: inputs[k] = [] inputs[k].append(v) # output outputs[LINE_NUMBER].append(index) # for failed line run, output is None, instead of a dict # in this case, we append an empty line if not isinstance(run_outputs, dict): for k in outputs_keys: if k == LINE_NUMBER: continue if k not in outputs: outputs[k] = [] outputs[k].append(None) else: for k, v in run_outputs.items(): if k not in outputs: outputs[k] = [] outputs[k].append(v) return inputs, outputs @monitor_operation(activity_name="pfazure.runs.visualize", activity_type=ActivityType.PUBLICAPI) def visualize(self, runs: Union[str, Run, List[str], List[Run]], **kwargs) -> None: """Visualize run(s) using Azure AI portal. :param runs: Names of the runs, or list of run objects. 
:type runs: Union[str, ~promptflow.sdk.entities.Run, List[str], List[~promptflow.sdk.entities.Run]] """ if not isinstance(runs, list): runs = [runs] validated_runs = [] for run in runs: run_name = Run._validate_and_return_run_name(run) validated_runs.append(run_name) subscription_id = self._operation_scope.subscription_id resource_group_name = self._operation_scope.resource_group_name workspace_name = self._operation_scope.workspace_name names = ",".join(validated_runs) portal_url = VIS_PORTAL_URL_TMPL.format( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, names=names, ) print(f"Web View: {portal_url}") def _resolve_automatic_runtime(self): logger.warning( f"You're using {AUTOMATIC_RUNTIME}, if it's first time you're using it, " "it may take a while to build runtime and you may see 'NotStarted' status for a while. " ) runtime_name = AUTOMATIC_RUNTIME_NAME return runtime_name def _resolve_runtime(self, run, flow_path, runtime): runtime = run._runtime or runtime # for remote flow case, use flow name as session id # for local flow case, use flow path to calculate session id session_id = run._flow_name if run._use_remote_flow else self._get_session_id(flow=flow_path) if runtime is None or runtime == AUTOMATIC_RUNTIME_NAME: runtime = self._resolve_automatic_runtime() elif not isinstance(runtime, str): raise TypeError(f"runtime should be a string, got {type(runtime)} for {runtime}") return runtime, session_id def _resolve_dependencies_in_parallel(self, run, runtime, reset=None): flow_path = run.flow with ThreadPoolExecutor() as pool: tasks = [ pool.submit(self._resolve_data_to_asset_id, run=run), pool.submit(self._resolve_flow, run=run), ] concurrent.futures.wait(tasks, return_when=concurrent.futures.ALL_COMPLETED) task_results = [task.result() for task in tasks] run.data = task_results[0] run.flow = task_results[1] runtime, session_id = self._resolve_runtime(run=run, flow_path=flow_path, runtime=runtime) rest_obj 
= run._to_rest_object() rest_obj.runtime_name = runtime rest_obj.session_id = session_id # TODO(2884482): support force reset & force install if runtime == "None": # HARD CODE for office scenario, use workspace default runtime when specified None rest_obj.runtime_name = None return rest_obj def _refine_payload_for_run_update(self, payload: dict, key: str, value, expected_type: type) -> dict: """Refine the payload for run update.""" if value is not None: payload[key] = value return payload def _modify_run_in_run_history(self, run_id: str, payload: dict) -> Run: """Modify run info in run history.""" headers = self._get_headers() url = self._run_history_endpoint_url + f"/runs/{run_id}/modify" response = requests.patch(url, headers=headers, json=payload) if response.status_code == 200: # the modify api returns different data format compared with get api, so we use get api here to # return standard Run object return self.get(run=run_id) else: raise RunRequestException( f"Failed to modify run in run history. Code: {response.status_code}, text: {response.text}" ) def _resolve_flow_definition_resource_id(self, run: Run): """Resolve the flow definition resource id.""" # for registry flow pattern, the flow uri can be passed as flow definition resource id directly if run.flow.startswith(REGISTRY_URI_PREFIX): return run.flow # for workspace flow pattern, generate the flow definition resource id workspace_id = self._workspace._workspace_id location = self._workspace.location return f"azureml://locations/{location}/workspaces/{workspace_id}/flows/{run._flow_name}" @monitor_operation(activity_name="pfazure.runs.download", activity_type=ActivityType.PUBLICAPI) def download( self, run: Union[str, Run], output: Optional[Union[str, Path]] = None, overwrite: Optional[bool] = False ) -> str: """Download the data of a run, including input, output, snapshot and other run information. .. 
note:: After the download is finished, you can use ``pf run create --source <run-info-local-folder>`` to register this run as a local run record, then you can use commands like ``pf run show/visualize`` to inspect the run just like a run that was created from local flow. :param run: The run name or run object :type run: Union[str, ~promptflow.entities.Run] :param output: The output directory. Default to be default to be "~/.promptflow/.runs" folder. :type output: Optional[str] :param overwrite: Whether to overwrite the existing run folder. Default to be False. :type overwrite: Optional[bool] :return: The run directory path :rtype: str """ import platform from promptflow.azure.operations._async_run_downloader import AsyncRunDownloader run = Run._validate_and_return_run_name(run) run_folder = self._validate_for_run_download(run=run, output=output, overwrite=overwrite) run_downloader = AsyncRunDownloader._from_run_operations(run_ops=self, run=run, output_folder=run_folder) if platform.system().lower() == "windows": # Reference: https://stackoverflow.com/questions/45600579/asyncio-event-loop-is-closed-when-getting-loop # On Windows seems to be a problem with EventLoopPolicy, use this snippet to work around it asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) async_run_allowing_running_loop(run_downloader.download) result_path = run_folder.resolve().as_posix() logger.info(f"Successfully downloaded run {run!r} to {result_path!r}.") return result_path def _validate_for_run_download(self, run: Union[str, Run], output: Optional[Union[str, Path]], overwrite): """Validate the run download parameters.""" run = Run._validate_and_return_run_name(run) # process the output path if output is None: # default to be "~/.promptflow/.runs" folder output_directory = Path.home() / PROMPT_FLOW_DIR_NAME / PROMPT_FLOW_RUNS_DIR_NAME else: output_directory = Path(output) # validate the run folder run_folder = output_directory / run if run_folder.exists(): if overwrite is 
True: logger.warning("Removing existing run folder %r.", run_folder.resolve().as_posix()) shutil.rmtree(run_folder) else: raise UserErrorException( f"Run folder {run_folder.resolve().as_posix()!r} already exists, please specify a new output path " f"or set the overwrite flag to be true." ) # check the run status, only download the completed run run = self.get(run=run) if run.status != RunStatus.COMPLETED: raise UserErrorException( f"Can only download the run with status {RunStatus.COMPLETED!r} " f"while {run.name!r}'s status is {run.status!r}." ) run_folder.mkdir(parents=True) return run_folder @monitor_operation(activity_name="pfazure.runs.cancel", activity_type=ActivityType.PUBLICAPI) def cancel(self, run: Union[str, Run], **kwargs) -> None: """Cancel a run. :param run: The run name or run object :type run: Union[str, ~promptflow.entities.Run] """ run = Run._validate_and_return_run_name(run) self._service_caller.cancel_flow_run( subscription_id=self._operation_scope.subscription_id, resource_group_name=self._operation_scope.resource_group_name, workspace_name=self._operation_scope.workspace_name, flow_run_id=run, )
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/_fileshare_storeage_helper.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import os from collections import defaultdict from functools import cached_property from multiprocessing import Lock from pathlib import Path from typing import Any, Dict, Optional from azure.ai.ml._artifacts._fileshare_storage_helper import FileStorageClient from azure.ai.ml._utils._asset_utils import ( DirectoryUploadProgressBar, FileUploadProgressBar, IgnoreFile, get_directory_size, ) from azure.core.exceptions import ResourceExistsError from azure.storage.fileshare import DirectoryProperties, ShareDirectoryClient from promptflow._sdk._vendor import get_upload_files_from_folder from promptflow.azure._constants._flow import PROMPTFLOW_FILE_SHARE_DIR from promptflow.azure._utils.gerneral import get_user_alias_from_credential uploading_lock = defaultdict(Lock) class FlowFileStorageClient(FileStorageClient): def __init__(self, credential: str, file_share_name: str, account_url: str, azure_cred): super().__init__(credential=credential, file_share_name=file_share_name, account_url=account_url) try: user_alias = get_user_alias_from_credential(azure_cred) except Exception: # fall back to unknown user when failed to get credential. 
user_alias = "unknown_user" self._user_alias = user_alias # TODO: update this after we finalize the design for flow file storage client # create user folder if not exist for directory_path in ["Users", f"Users/{user_alias}", f"Users/{user_alias}/{PROMPTFLOW_FILE_SHARE_DIR}"]: self.directory_client = ShareDirectoryClient( account_url=account_url, credential=credential, share_name=file_share_name, directory_path=directory_path, ) # try to create user folder if not exist try: self.directory_client.create_directory() except ResourceExistsError: pass @cached_property def file_share_prefix(self) -> str: return f"Users/{self._user_alias}/{PROMPTFLOW_FILE_SHARE_DIR}" def upload( self, source: str, name: str, version: str, ignore_file: IgnoreFile = IgnoreFile(None), asset_hash: Optional[str] = None, show_progress: bool = True, ) -> Dict[str, str]: """Upload a file or directory to a path inside the file system.""" source_name = Path(source).name dest = asset_hash # truncate path longer than 50 chars for terminal display if show_progress and len(source_name) >= 50: formatted_path = "{:.47}".format(source_name) + "..." 
else: formatted_path = source_name msg = f"Uploading {formatted_path}" # lock to prevent concurrent uploading of the same file or directory with uploading_lock[self.directory_client.directory_path + "/" + dest]: # start upload if os.path.isdir(source): subdir = self.directory_client.get_subdirectory_client(dest) if not subdir.exists(): # directory is uploaded based on asset hash for now, so skip uploading if subdir exists self.upload_dir( source, dest, msg=msg, show_progress=show_progress, ignore_file=ignore_file, ) else: self.upload_file(source, dest=dest, msg=msg, show_progress=show_progress) artifact_info = {"remote path": dest, "name": name, "version": version} return artifact_info def upload_file( self, source: str, dest: str, show_progress: Optional[bool] = None, msg: Optional[str] = None, in_directory: bool = False, subdirectory_client: Optional[ShareDirectoryClient] = None, callback: Optional[Any] = None, ) -> None: """ " Upload a single file to a path inside the file system directory.""" validate_content = os.stat(source).st_size > 0 # don't do checksum for empty files # relative path from root relative_path = Path(subdirectory_client.directory_path).relative_to(self.directory_client.directory_path) dest = Path(dest).relative_to(relative_path).as_posix() if "/" in dest: # dest is a folder, need to switch subdirectory client dest_dir, dest = dest.rsplit("/", 1) subdirectory_client = subdirectory_client.get_subdirectory_client(dest_dir) with open(source, "rb") as data: if in_directory: file_name = dest.rsplit("/")[-1] if show_progress: subdirectory_client.upload_file( file_name=file_name, data=data, validate_content=validate_content, raw_response_hook=callback, ) else: subdirectory_client.upload_file( file_name=file_name, data=data, validate_content=validate_content, ) else: if show_progress: with FileUploadProgressBar(msg=msg) as progress_bar: self.directory_client.upload_file( file_name=dest, data=data, validate_content=validate_content, 
raw_response_hook=progress_bar.update_to, ) else: self.directory_client.upload_file(file_name=dest, data=data, validate_content=validate_content) self.uploaded_file_count = self.uploaded_file_count + 1 def upload_dir( self, source: str, dest: str, msg: str, show_progress: bool, ignore_file: IgnoreFile, ) -> None: """Upload a directory to a path inside the fileshare directory.""" subdir = self.directory_client.create_subdirectory(dest) source_path = Path(source).resolve() prefix = dest + "/" upload_paths = get_upload_files_from_folder( path=source_path, prefix=prefix, ignore_file=ignore_file, ) upload_paths = sorted(upload_paths) self.total_file_count = len(upload_paths) # travers all directories recursively and create them in the fileshare def travers_recursively(child_dir, source_dir): for item in os.listdir(source_dir): item_path = os.path.join(source_dir, item) if os.path.isdir(item_path): new_dir = child_dir.create_subdirectory(item) travers_recursively(new_dir, item_path) travers_recursively(child_dir=subdir, source_dir=source) if show_progress: with DirectoryUploadProgressBar(dir_size=get_directory_size(source_path), msg=msg) as progress_bar: for src, destination in upload_paths: self.upload_file( src, destination, in_directory=True, subdirectory_client=subdir, show_progress=show_progress, callback=progress_bar.update_to, ) else: for src, destination in upload_paths: self.upload_file( src, destination, in_directory=True, subdirectory_client=subdir, show_progress=show_progress, ) def _check_file_share_directory_exist(self, dest) -> bool: """Check if the file share directory exists.""" return self.directory_client.get_subdirectory_client(dest).exists() def _check_file_share_file_exist(self, dest) -> bool: """Check if the file share directory exists.""" if dest.startswith(self.file_share_prefix): dest = dest.replace(f"{self.file_share_prefix}/", "") file_client = self.directory_client.get_file_client(dest) try: file_client.get_file_properties() except Exception: 
return False return True def _delete_file_share_directory(self, dir_client) -> None: """Recursively delete a directory with content in the file share.""" for item in dir_client.list_directories_and_files(): if isinstance(item, DirectoryProperties): self._delete_file_share_directory(dir_client.get_subdirectory_client(item.name)) else: dir_client.delete_file(item.name) dir_client.delete_directory()
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/_arm_connection_operations.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from enum import Enum
from typing import Any, Dict, Union

import requests
from azure.ai.ml._restclient.v2023_06_01_preview.models import WorkspaceConnectionPropertiesV2BasicResource
from azure.ai.ml._scope_dependent_operations import (
    OperationConfig,
    OperationsContainer,
    OperationScope,
    _ScopeDependentOperations,
)
from azure.core.exceptions import ClientAuthenticationError

from promptflow._sdk.entities._connection import CustomConnection, _Connection
from promptflow._utils.retry_utils import http_retry_wrapper
from promptflow.azure._restclient.flow_service_caller import FlowServiceCaller
from promptflow.azure._utils.gerneral import get_arm_token
from promptflow.exceptions import ErrorTarget, SystemErrorException, UserErrorException

# ARM REST paths (relative to the management host) for workspace connections.
# The "listsecrets" POST variant also returns credential material, which is why
# the caller needs the "Workspace Connection Secrets Reader" RBAC role.
GET_CONNECTION_URL = (
    "/subscriptions/{sub}/resourcegroups/{rg}/providers/Microsoft.MachineLearningServices"
    "/workspaces/{ws}/connections/{name}/listsecrets?api-version=2023-04-01-preview"
)
LIST_CONNECTION_URL = (
    "/subscriptions/{sub}/resourcegroups/{rg}/providers/Microsoft.MachineLearningServices"
    "/workspaces/{ws}/connections?api-version=2023-04-01-preview"
)
# Metadata keys written by promptflow carry this prefix, e.g. "azureml.flow.connection_type".
FLOW_META_PREFIX = "azureml.flow."


class ConnectionCategory(str, Enum):
    """Workspace-connection categories recognized by this module.

    Values mirror the ARM workspace connection ``category`` property.
    """

    AzureOpenAI = "AzureOpenAI"
    CognitiveSearch = "CognitiveSearch"
    CognitiveService = "CognitiveService"
    CustomKeys = "CustomKeys"


def get_case_insensitive_key(d, key, default=None):
    """Return the value of *key* in dict *d*, matching keys case-insensitively.

    Returns *default* when no key matches. First case-insensitive match wins.
    """
    for k, v in d.items():
        if k.lower() == key.lower():
            return v
    return default


class ArmConnectionOperations(_ScopeDependentOperations):
    """ArmConnectionOperations.

    Get connections from arm api. You should not instantiate this class directly. Instead, you should
    create an PFClient instance that instantiates it for you and
    attaches it as an attribute.
    """

    def __init__(
        self,
        operation_scope: OperationScope,
        operation_config: OperationConfig,
        all_operations: OperationsContainer,
        credential,
        service_caller: FlowServiceCaller,
        **kwargs: Dict,
    ):
        super(ArmConnectionOperations, self).__init__(operation_scope, operation_config)
        self._all_operations = all_operations
        self._service_caller = service_caller
        self._credential = credential

    def get(self, name, **kwargs):
        """Get a connection entity by name, resolving it through the ARM listsecrets API."""
        connection_dict = self.build_connection_dict(name)
        return _Connection._from_execution_connection_dict(name=name, data=connection_dict)

    @classmethod
    def _direct_get(cls, name, subscription_id, resource_group_name, workspace_name, credential):
        """
        This method is added for local pf_client with workspace provider to ensure we only require limited
        permission(workspace/list secrets). As create azure pf_client requires workspace read permission.
        """
        connection_dict = cls._build_connection_dict(
            name, subscription_id, resource_group_name, workspace_name, credential
        )
        return _Connection._from_execution_connection_dict(name=name, data=connection_dict)

    @classmethod
    def open_url(cls, token, url, action, host="management.azure.com", method="GET", model=None) -> Union[Any, dict]:
        """Issue an authenticated ARM request and return the parsed JSON (or deserialized model).

        Maps HTTP failures onto the module's exception hierarchy:
        403 -> AccessDeniedError, other 4xx -> OpenURLFailedUserError,
        any other non-200 -> OpenURLFailed.

        :type token: str
        :type url: str
        :type action: str, for the error message format.
        :type host: str
        :type method: str
        :type model: Type[msrest.serialization.Model]
        """
        headers = {"Authorization": f"Bearer {token}"}
        # requests.request is wrapped with retry; transient failures are retried before raising.
        response = http_retry_wrapper(requests.request)(method, f"https://{host}{url}", headers=headers)
        # {url}/{reason} are left as literal placeholders for the exception's message_format machinery.
        message_format = (
            f"Open url {{url}} failed with status code: {response.status_code}, action: {action}, reason: {{reason}}"
        )
        if response.status_code == 403:
            # NOTE(review): target here is RUNTIME while every other error in this module
            # uses CONTROL_PLANE_SDK — confirm whether that asymmetry is intentional.
            raise AccessDeniedError(operation=url, target=ErrorTarget.RUNTIME)
        elif 400 <= response.status_code < 500:
            raise OpenURLFailedUserError(
                message_format=message_format,
                url=url,
                reason=response.reason,
            )
        elif response.status_code != 200:
            raise OpenURLFailed(
                message_format=message_format,
                url=url,
                reason=response.reason,
            )
        data = response.json()
        if model:
            return model.deserialize(data)
        return data

    @classmethod
    def validate_and_fallback_connection_type(cls, name, type_name, category, metadata):
        """Return the connection type name, inferring it from category/metadata when not set.

        An explicitly provided *type_name* always wins; otherwise the category
        (and, for CognitiveService, the metadata "Kind") determines the type.
        Raises UnknownConnectionType when no mapping applies.
        """
        if type_name:
            return type_name
        if category == ConnectionCategory.AzureOpenAI:
            return "AzureOpenAI"
        if category == ConnectionCategory.CognitiveSearch:
            return "CognitiveSearch"
        if category == ConnectionCategory.CognitiveService:
            kind = get_case_insensitive_key(metadata, "Kind")
            if kind == "Content Safety":
                return "AzureContentSafety"
            if kind == "Form Recognizer":
                return "FormRecognizer"
        raise UnknownConnectionType(
            message_format="Connection {name} is not recognized in PromptFlow, "
            "please make sure the connection is created in PromptFlow.",
            category=category,
            name=name,
        )

    @classmethod
    def build_connection_dict_from_rest_object(cls, name, obj) -> dict:
        """Convert an ARM workspace-connection REST object into an execution connection dict.

        :type name: str
        :type obj: azure.ai.ml._restclient.v2023_06_01_preview.models.WorkspaceConnectionPropertiesV2BasicResource
        """
        # Reference 1: https://msdata.visualstudio.com/Vienna/_git/vienna?path=/src/azureml-api/src/AccountRP/Contracts/WorkspaceConnection/WorkspaceConnectionDtoV2.cs&_a=blame&version=GBmaster  # noqa: E501
        # Reference 2: https://msdata.visualstudio.com/Vienna/_git/vienna?path=%2Fsrc%2Fazureml-api%2Fsrc%2FDesigner%2Fsrc%2FMiddleTier%2FMiddleTier%2FServices%2FPromptFlow%2FConnectionsManagement.cs&version=GBmaster&_a=contents  # noqa: E501
        # This connection type covers the generic ApiKey auth connection categories, for examples:
        # AzureOpenAI:
        #     Category:= AzureOpenAI
        #     AuthType:= ApiKey (as type discriminator)
        #     Credentials:= {ApiKey} as <see cref="ApiKey"/>
        #     Target:= {ApiBase}
        #
        # CognitiveService:
        #     Category:= CognitiveService
        #     AuthType:= ApiKey (as type discriminator)
        #     Credentials:= {SubscriptionKey} as <see cref="ApiKey"/>
        #     Target:= ServiceRegion={serviceRegion}
        #
        # CognitiveSearch:
        #     Category:= CognitiveSearch
        #     AuthType:= ApiKey (as type discriminator)
        #     Credentials:= {Key} as <see cref="ApiKey"/>
        #     Target:= {Endpoint}
        #
        # Use Metadata property bag for ApiType, ApiVersion, Kind and other metadata fields
        properties = obj.properties
        type_name = get_case_insensitive_key(properties.metadata, f"{FLOW_META_PREFIX}connection_type")
        type_name = cls.validate_and_fallback_connection_type(name, type_name, properties.category, properties.metadata)
        module = get_case_insensitive_key(properties.metadata, f"{FLOW_META_PREFIX}module", "promptflow.connections")
        # Note: Category is connectionType in MT, but type name should be class name, which is flowValueType in MT.
        # Handle old connections here, see details: https://github.com/Azure/promptflow/tree/main/connections
        type_name = f"{type_name}Connection" if not type_name.endswith("Connection") else type_name
        meta = {"type": type_name, "module": module}
        if properties.category == ConnectionCategory.AzureOpenAI:
            value = {
                "api_key": properties.credentials.key,
                "api_base": properties.target,
                "api_type": get_case_insensitive_key(properties.metadata, "ApiType"),
                "api_version": get_case_insensitive_key(properties.metadata, "ApiVersion"),
            }
            # Note: Resource id is required in some cloud scenario, which is not exposed on sdk/cli entity.
            resource_id = get_case_insensitive_key(properties.metadata, "ResourceId")
            if resource_id:
                value["resource_id"] = resource_id
        elif properties.category == ConnectionCategory.CognitiveSearch:
            value = {
                "api_key": properties.credentials.key,
                "api_base": properties.target,
                "api_version": get_case_insensitive_key(properties.metadata, "ApiVersion"),
            }
        elif properties.category == ConnectionCategory.CognitiveService:
            value = {
                "api_key": properties.credentials.key,
                "endpoint": properties.target,
                "api_version": get_case_insensitive_key(properties.metadata, "ApiVersion"),
            }
        elif properties.category == ConnectionCategory.CustomKeys:
            # Merge secrets from credentials.keys and other string fields from metadata
            value = {
                **properties.credentials.keys,
                **{k: v for k, v in properties.metadata.items() if not k.startswith(FLOW_META_PREFIX)},
            }
            if type_name == CustomConnection.__name__:
                meta["secret_keys"] = list(properties.credentials.keys.keys())
        else:
            raise UnknownConnectionType(
                message_format=(
                    "Unknown connection {name} category {category}, "
                    "please upgrade your promptflow sdk version and retry."
                ),
                category=properties.category,
                name=name,
            )
        # Note: Filter empty values out to ensure default values can be picked when init class object.
        return {**meta, "value": {k: v for k, v in value.items() if v}}

    def build_connection_dict(self, name):
        """Build the execution connection dict for *name* using this client's workspace scope."""
        return self._build_connection_dict(
            name,
            self._operation_scope.subscription_id,
            self._operation_scope.resource_group_name,
            self._operation_scope.workspace_name,
            self._credential,
        )

    @classmethod
    def _convert_to_connection_dict(cls, conn_name, conn_data):
        """Deserialize raw REST data and convert it to a connection dict; wraps failures in BuildConnectionError."""
        try:
            rest_obj = WorkspaceConnectionPropertiesV2BasicResource.deserialize(conn_data)
            conn_dict = cls.build_connection_dict_from_rest_object(conn_name, rest_obj)
            return conn_dict
        except Exception as e:
            raise BuildConnectionError(
                message_format=f"Build connection dict for connection {{name}} failed with {e}.",
                name=conn_name,
            )

    @classmethod
    def _build_connection_dict(cls, name, subscription_id, resource_group_name, workspace_name, credential) -> dict:
        """Fetch connection secrets via ARM and convert them into an execution connection dict.

        :type name: str
        :type subscription_id: str
        :type resource_group_name: str
        :type workspace_name: str
        :type credential: azure.identity.TokenCredential
        """
        url = GET_CONNECTION_URL.format(
            sub=subscription_id,
            rg=resource_group_name,
            ws=workspace_name,
            name=name,
        )
        try:
            rest_obj: WorkspaceConnectionPropertiesV2BasicResource = cls.open_url(
                get_arm_token(credential=credential),
                url=url,
                action="listsecrets",
                method="POST",
                model=WorkspaceConnectionPropertiesV2BasicResource,
            )
        except AccessDeniedError:
            # 403 from ARM: translate into an actionable RBAC guidance message.
            auth_error_message = (
                "Access denied to list workspace secret due to invalid authentication. "
                "Please ensure you have gain RBAC role 'Azure Machine Learning Workspace Connection Secrets Reader' "
                "for current workspace, and wait for a few minutes to make sure the new role takes effect. "
            )
            raise OpenURLUserAuthenticationError(message=auth_error_message)
        except ClientAuthenticationError as e:
            # Credential-level failure (e.g. expired token) is a user error.
            raise UserErrorException(target=ErrorTarget.CONTROL_PLANE_SDK, message=str(e), error=e)
        except Exception as e:
            raise SystemErrorException(target=ErrorTarget.CONTROL_PLANE_SDK, message=str(e), error=e)

        try:
            return cls.build_connection_dict_from_rest_object(name, rest_obj)
        except Exception as e:
            raise BuildConnectionError(
                message_format=f"Build connection dict for connection {{name}} failed with {e}.",
                name=name,
            )


class AccessDeniedError(UserErrorException):
    """Exception raised when run info can not be found in storage"""

    def __init__(self, operation: str, target: ErrorTarget):
        super().__init__(message=f"Access is denied to perform operation {operation!r}", target=target)


class OpenURLFailed(SystemErrorException):
    """Raised when an ARM request fails with an unexpected (non-4xx) status code."""

    def __init__(self, **kwargs):
        super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, **kwargs)


class BuildConnectionError(SystemErrorException):
    """Raised when a REST connection object cannot be converted to a connection dict."""

    def __init__(self, **kwargs):
        super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, **kwargs)


class UserAuthenticationError(UserErrorException):
    """Exception raised when user authentication failed"""

    pass


class OpenURLUserAuthenticationError(UserAuthenticationError):
    """Raised when an ARM request is rejected with 403 (missing RBAC role)."""

    def __init__(self, **kwargs):
        super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, **kwargs)


class OpenURLFailedUserError(UserErrorException):
    """Raised when an ARM request fails with a 4xx status code other than 403."""

    def __init__(self, **kwargs):
        super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, **kwargs)


class UnknownConnectionType(UserErrorException):
    """Raised when a workspace connection's category/type cannot be mapped to a promptflow type."""

    def __init__(self, **kwargs):
        super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, **kwargs)
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/_async_run_downloader.py
import asyncio
import contextvars
import functools
import json
from pathlib import Path
from typing import Optional, Union

import httpx
from azure.core.exceptions import HttpResponseError
from azure.storage.blob.aio import BlobServiceClient

from promptflow._sdk._constants import DEFAULT_ENCODING, DownloadedRun
from promptflow._sdk._errors import DownloadInternalError, RunNotFoundError, RunOperationError
from promptflow._sdk.entities import Run
from promptflow._utils.logger_utils import get_cli_sdk_logger
from promptflow.exceptions import UserErrorException

logger = get_cli_sdk_logger()


class AsyncRunDownloader:
    """Download run results from the service asynchronously.

    :param run: The run id.
    :type run: str
    :param run_ops: The run operations.
    :type run_ops: ~promptflow.azure.operations.RunOperations
    :param output_folder: The output folder to save the run results.
    :type output_folder: Union[Path, str]
    """

    # Blob paths containing any of these substrings are skipped during download.
    IGNORED_PATTERN = ["__pycache__"]

    def __init__(self, run: str, run_ops: "RunOperations", output_folder: Union[str, Path]) -> None:
        self.run = run
        self.run_ops = run_ops
        self.datastore = run_ops._workspace_default_datastore
        self.output_folder = Path(output_folder)
        self.blob_service_client = self._init_blob_service_client()
        self._use_flow_outputs = False  # old runtime does not write debug_info output asset, use flow_outputs instead

    def _init_blob_service_client(self):
        """Create the async blob service client against the workspace default datastore account."""
        logger.debug("Initializing blob service client.")
        # NOTE(review): account_url is built without an "https://" scheme prefix —
        # confirm BlobServiceClient accepts this form for the target SDK version.
        account_url = f"{self.datastore.account_name}.blob.{self.datastore.endpoint}"
        return BlobServiceClient(account_url=account_url, credential=self.run_ops._credential)

    async def download(self) -> str:
        """Download the run results asynchronously."""
        error_msg_prefix = f"Failed to download run {self.run!r}"
        try:
            # pass verify=False to client to disable SSL verification.
            # Source: https://github.com/encode/httpx/issues/1331
            async with httpx.AsyncClient(verify=False) as client:
                tasks = [
                    # put async functions in tasks to run in coroutines
                    self._download_artifacts_and_snapshot(client),
                    # below functions are actually synchronous functions in order to reuse code
                    # and use thread pool to avoid blocking the event loop
                    to_thread(self._download_run_metrics),
                    to_thread(self._download_run_logs),
                ]
                await asyncio.gather(*tasks)
        except RunNotFoundError as e:
            raise RunOperationError(f"{error_msg_prefix}. Error: {e}") from e
        except HttpResponseError as e:
            # 403 from storage is surfaced as a user-facing permission error; anything else is internal.
            if e.status_code == 403:
                raise RunOperationError(
                    f"{error_msg_prefix}. User does not have permission to perform this operation on storage account "
                    f"{self.datastore.account_name!r} container {self.datastore.container_name!r}. "
                    f"Original azure blob error: {str(e)}"
                )
            else:
                raise DownloadInternalError(f"{error_msg_prefix}. Error: {e}") from e
        except Exception as e:
            raise DownloadInternalError(f"{error_msg_prefix}. Error: {e}") from e

        return self.output_folder.resolve().as_posix()

    async def _download_artifacts_and_snapshot(self, httpx_client: httpx.AsyncClient):
        """Download flow artifacts and snapshot, and persist the run metadata file."""
        run_data = await self._get_run_data_from_run_history(httpx_client)

        logger.debug("Parsing run data from run history to get necessary information.")
        # extract necessary information from run data
        snapshot_id = run_data["runMetadata"]["properties"]["azureml.promptflow.snapshot_id"]
        output_data = run_data["runMetadata"]["outputs"].get("debug_info", None)
        if output_data is None:
            # fall back for runs produced by old runtimes that never wrote a debug_info asset
            logger.warning(
                "Downloading run '%s' but the 'debug_info' output assets is not available, "
                "maybe because the job ran on old version runtime, trying to get `flow_outputs` output asset instead.",
                self.run,
            )
            self._use_flow_outputs = True
            output_data = run_data["runMetadata"]["outputs"].get("flow_outputs", None)
        output_asset_id = output_data["assetId"]

        # save run metadata to run_metadata.json
        logger.debug("Saving the run meta data.")
        run_data = self.run_ops._refine_run_data_from_run_history(run_data)
        run_data = Run._from_run_history_entity(run_data)
        with open(self.output_folder / DownloadedRun.RUN_METADATA_FILE_NAME, "w", encoding=DEFAULT_ENCODING) as f:
            json.dump(run_data._to_dict(), f, ensure_ascii=False)

        async with self.blob_service_client:
            container_name = self.datastore.container_name
            logger.debug("Getting container client (%s) from workspace default datastore.", container_name)
            container_client = self.blob_service_client.get_container_client(container_name)

            async with container_client:
                tasks = [
                    self._download_flow_artifacts(httpx_client, container_client, output_asset_id),
                    self._download_snapshot(httpx_client, container_client, snapshot_id),
                ]
                await asyncio.gather(*tasks)

    async def _get_run_data_from_run_history(self, client: httpx.AsyncClient):
        """Get the run data from the run history."""
        logger.debug("Getting run data from run history.")
        headers = self.run_ops._get_headers()
        url = self.run_ops._run_history_endpoint_url + "/rundata"

        payload = {
            "runId": self.run,
            "selectRunMetadata": True,
            "selectRunDefinition": True,
            "selectJobSpecification": True,
        }

        error_msg_prefix = "Failed to get run data from run history"
        try:
            response = await client.post(url, headers=headers, json=payload)
        except Exception as e:
            raise DownloadInternalError(f"{error_msg_prefix}. Error: {e}") from e
        else:
            if response.status_code == 200:
                return response.json()
            elif response.status_code == 404:
                raise RunNotFoundError(f"{error_msg_prefix}. Run {self.run!r} not found.")
            else:
                raise DownloadInternalError(
                    f"{error_msg_prefix}. Code: {response.status_code}. Reason: {response.reason_phrase}"
                )

    def _download_run_metrics(
        self,
    ):
        """Download the run metrics."""
        # synchronous on purpose; executed via to_thread() from download()
        logger.debug("Downloading run metrics.")
        metrics = self.run_ops.get_metrics(self.run)
        with open(self.output_folder / DownloadedRun.METRICS_FILE_NAME, "w", encoding=DEFAULT_ENCODING) as f:
            json.dump(metrics, f, ensure_ascii=False)
        logger.debug("Downloaded run metrics.")

    async def _download_flow_artifacts(self, httpx_client: httpx.AsyncClient, container_client, output_data):
        """Download the output data."""
        asset_path = await self._get_asset_path(httpx_client, output_data)
        await self._download_blob_folder_from_asset_path(container_client, asset_path)

    async def _download_blob_folder_from_asset_path(
        self, container_client, asset_path: str, local_folder: Optional[Path] = None
    ):
        """Download the blob data from the data path."""
        logger.debug("Downloading all blobs from data path prefix '%s'", asset_path)
        if local_folder is None:
            local_folder = self.output_folder

        tasks = []
        async for blob in container_client.list_blobs(name_starts_with=asset_path):
            blob_client = container_client.get_blob_client(blob.name)
            # mirror the blob hierarchy below asset_path under local_folder
            relative_path = Path(blob.name).relative_to(asset_path)
            local_path = local_folder / relative_path
            tasks.append(self._download_single_blob(blob_client, local_path))
        await asyncio.gather(*tasks)

    async def _download_single_blob(self, blob_client, local_path: Optional[Path] = None):
        """Download a single blob."""
        if local_path is None:
            local_path = Path(self.output_folder / blob_client.blob_name)
        elif local_path.exists():
            # only explicitly-passed destinations are protected against overwrite
            raise UserErrorException(f"Local file {local_path.resolve().as_posix()!r} already exists.")

        # ignore some files
        for item in self.IGNORED_PATTERN:
            if item in blob_client.blob_name:
                logger.warning(
                    "Ignoring file '%s' because it matches the ignored pattern '%s'", local_path.as_posix(), item
                )
                return None

        logger.debug("Downloading blob '%s' to local path '%s'", blob_client.blob_name, local_path.resolve().as_posix())
        local_path.parent.mkdir(parents=True, exist_ok=True)
        async with blob_client:
            with open(local_path, "wb") as f:
                stream = await blob_client.download_blob()
                async for chunk in stream.chunks():
                    f.write(chunk)
        return local_path

    async def _download_snapshot(self, httpx_client: httpx.AsyncClient, container_client, snapshot_id):
        """Download the flow snapshot."""
        snapshot_urls = await self._get_flow_snapshot_urls(httpx_client, snapshot_id)

        logger.debug("Downloading all snapshot blobs from snapshot urls.")
        tasks = []
        for url in snapshot_urls:
            # derive the blob name and run-relative path from the SAS url structure
            blob_name = url.split(self.datastore.container_name)[-1].lstrip("/")
            blob_client = container_client.get_blob_client(blob_name)
            relative_path = url.split(self.run)[-1].lstrip("/")
            local_path = Path(self.output_folder / DownloadedRun.SNAPSHOT_FOLDER / relative_path)
            tasks.append(self._download_single_blob(blob_client, local_path))
        await asyncio.gather(*tasks)

    async def _get_flow_snapshot_urls(self, httpx_client: httpx.AsyncClient, snapshot_id):
        """Resolve a snapshot id into blob SAS urls via the content service."""
        logger.debug("Getting flow snapshot blob urls from snapshot id with calling to content service.")
        headers = self.run_ops._get_headers()
        endpoint = self.run_ops._run_history_endpoint_url.replace("/history/v1.0", "/content/v2.0")
        url = endpoint + "/snapshots/sas"
        payload = {
            "snapshotOrAssetId": snapshot_id,
        }

        error_msg_prefix = (
            f"Failed to download flow snapshots with snapshot id {snapshot_id}, "
            f"because the client failed to retrieve data from content service"
        )
        try:
            response = await httpx_client.post(url, headers=headers, json=payload)
        except Exception as e:
            raise DownloadInternalError(f"{error_msg_prefix}. Error: {e}") from e
        else:
            if response.status_code == 200:
                return self._parse_snapshot_response(response.json())
            elif response.status_code == 404:
                raise DownloadInternalError(f"{error_msg_prefix}. Error: Snapshot id not found.")
            else:
                raise DownloadInternalError(
                    f"{error_msg_prefix}. Code: {response.status_code}. Reason: {response.reason_phrase}"
                )

    async def _get_asset_path(self, client: httpx.AsyncClient, asset_id):
        """Get the asset path from asset id."""
        logger.debug("Getting asset path from asset id with calling to data service.")
        headers = self.run_ops._get_headers()
        endpoint = self.run_ops._run_history_endpoint_url.replace("/history", "/data")
        url = endpoint + "/dataversion/getByAssetId"
        payload = {
            "value": asset_id,
        }

        error_msg_prefix = "Failed to download flow artifacts due to failed to retrieve data from data service"
        try:
            response = await client.post(url, headers=headers, json=payload)
        except Exception as e:
            raise DownloadInternalError(f"{error_msg_prefix}. Error: {e}") from e

        if response.status_code != 200:
            raise DownloadInternalError(
                f"{error_msg_prefix}. Code: {response.status_code}. Reason: {response.reason_phrase}"
            )
        response_data = response.json()
        data_path = response_data["dataVersion"]["dataUri"].split("/paths/")[-1]
        if self._use_flow_outputs:
            # old-runtime fallback: the flow_outputs asset lives next to flow_artifacts
            data_path = data_path.replace("flow_outputs", "flow_artifacts")
        return data_path

    def _parse_snapshot_response(self, response: dict):
        """Parse the snapshot response."""
        # recursively collect absoluteUrl values from the snapshot tree
        urls = []
        if response["absoluteUrl"]:
            urls.append(response["absoluteUrl"])
        for value in response["children"].values():
            urls += self._parse_snapshot_response(value)

        return urls

    def _download_run_logs(self):
        """Download the run logs."""
        # synchronous on purpose; executed via to_thread() from download()
        logger.debug("Downloading run logs.")
        logs = self.run_ops._get_log(self.run)

        with open(self.output_folder / DownloadedRun.LOGS_FILE_NAME, "w", encoding=DEFAULT_ENCODING) as f:
            f.write(logs)
        logger.debug("Downloaded run logs.")

    @classmethod
    def _from_run_operations(cls, run_ops: "RunOperations", run: str, output_folder: Union[str, Path]):
        """Create an instance from run operations."""
        from azure.ai.ml.entities._datastore.azure_storage import AzureBlobDatastore

        datastore = run_ops._workspace_default_datastore
        if isinstance(datastore, AzureBlobDatastore):
            return cls(run=run, run_ops=run_ops, output_folder=output_folder)
        else:
            raise UserErrorException(
                f"Cannot download run {run!r} because the workspace default datastore is not supported. Supported ones "
                f"are ['AzureBlobDatastore'], got {type(datastore).__name__!r}."
            )


async def to_thread(func, /, *args, **kwargs):
    # this is copied from asyncio.to_thread() in Python 3.9
    # as it is not available in Python 3.8, which is the minimum supported version of promptflow
    loop = asyncio.get_running_loop()
    ctx = contextvars.copy_context()
    func_call = functools.partial(ctx.run, func, *args, **kwargs)
    return await loop.run_in_executor(None, func_call)
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/operations/__init__.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore from ._flow_operations import FlowOperations from ._run_operations import RunOperations __all__ = ["FlowOperations", "RunOperations"]
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_schemas/_flow_schema.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import json
from pathlib import Path

from azure.ai.ml._schema import UnionField, YamlFileSchema
from azure.ai.ml._schema.core.fields import LocalPathField
from marshmallow import fields, post_load

from promptflow._utils.logger_utils import LoggerFactory

module_logger = LoggerFactory.get_logger(__name__)


class FlowSchema(YamlFileSchema):
    """Marshmallow schema for a flow YAML file."""

    name = fields.Str(attribute="name")
    id = fields.Str(attribute="id")
    description = fields.Str(attribute="description")
    tags = fields.Dict(keys=fields.Str, attribute="tags")
    # flow snapshot: either a local path or a plain string reference
    path = UnionField(
        [
            LocalPathField(),
            fields.Str(),
        ],
    )
    display_name = fields.Str(attribute="display_name")
    type = fields.Str(attribute="type")
    properties = fields.Dict(keys=fields.Str, attribute="properties")

    @post_load
    def update_properties(self, dct, **kwargs):
        """Normalize the loaded dict after deserialization.

        - Maps client-side flow types ("standard"/"evaluate") to service values
          ("default"/"evaluation"); any other value passes through unchanged.
        - Inlines the batch-inputs sample file content into
          ``properties["promptflow.batch_inputs"]`` when the referenced file exists.

        :param dct: The deserialized flow dict.
        :param kwargs: Extra marshmallow post_load kwargs (unused).
        :return: The normalized dict.
        """
        folder = Path(self.context["base_path"])

        flow_type = dct.get("type")
        if flow_type:
            mapping = {
                "standard": "default",
                "evaluate": "evaluation",
            }
            # Fix: use .get with fallback — a type value outside the mapping
            # (e.g. one already in service form) previously raised KeyError here.
            dct["type"] = mapping.get(flow_type, flow_type)

        properties = dct.get("properties")
        if properties and "promptflow.batch_inputs" in properties:
            input_path = properties["promptflow.batch_inputs"]
            samples_file = folder / input_path
            if samples_file.exists():
                with open(samples_file, "r", encoding="utf-8") as fp:
                    properties["promptflow.batch_inputs"] = json.loads(fp.read())

        return dct
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_schemas/__init__.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_ml/__init__.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- """ This file stores functions and objects that will be used in prompt-flow sdk. DO NOT change the module names in "all" list, add new modules if needed. """ class _DummyCallableClassForLazyImportError: """This class is used to put off ImportError until the imported class or function is called.""" @classmethod def _get_message(cls): return "azure-ai-ml is not installed. Please install azure-ai-ml to use this feature." def __init__(self, *args, **kwargs): raise ImportError(self._get_message()) def __call__(self, *args, **kwargs): raise ImportError(self._get_message()) # TODO: avoid import azure.ai.ml if promptflow.azure.configure is not called try: from azure.ai.ml import MLClient, load_component from azure.ai.ml.entities import Component from azure.ai.ml.entities._assets import Code from azure.ai.ml.entities._component._additional_includes import AdditionalIncludesMixin from azure.ai.ml.entities._load_functions import load_common except ImportError: class load_component(_DummyCallableClassForLazyImportError): pass class Component(_DummyCallableClassForLazyImportError): pass class MLClient(_DummyCallableClassForLazyImportError): pass class load_common(_DummyCallableClassForLazyImportError): pass class Code(_DummyCallableClassForLazyImportError): pass class AdditionalIncludesMixin(_DummyCallableClassForLazyImportError): pass __all__ = [ "load_component", "Component", "MLClient", "load_common", "Code", "AdditionalIncludesMixin", ]
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_entities/_workspace_connection_spec.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from dataclasses import asdict, dataclass

from promptflow.azure._restclient.flow.models import ConnectionConfigSpec as RestConnectionConfigSpec
from promptflow.azure._restclient.flow.models import WorkspaceConnectionSpec as RestWorkspaceConnectionSpec


def _drop_none(items):
    """dict_factory for asdict(): keep only fields whose value is not None."""
    return {field_name: field_value for field_name, field_value in items if field_value is not None}


@dataclass
class ConnectionConfigSpec:
    """Spec of one configuration entry of a workspace connection type."""

    name: str
    display_name: str
    config_value_type: str
    default_value: str = None
    description: str = None
    enum_values: list = None
    is_optional: bool = False

    @classmethod
    def _from_rest_object(cls, rest_obj: RestConnectionConfigSpec):
        """Build a ConnectionConfigSpec from its REST counterpart."""
        return cls(
            name=rest_obj.name,
            display_name=rest_obj.display_name,
            config_value_type=rest_obj.config_value_type,
            default_value=rest_obj.default_value,
            description=rest_obj.description,
            enum_values=rest_obj.enum_values,
            is_optional=rest_obj.is_optional,
        )

    def _to_dict(self):
        """Serialize to a plain dict, omitting None-valued fields."""
        return asdict(self, dict_factory=_drop_none)


@dataclass
class WorkspaceConnectionSpec:
    """Spec of a workspace connection type."""

    module: str
    connection_type: str  # Connection type example: AzureOpenAI
    flow_value_type: str  # Flow value type is the input.type on node, example: AzureOpenAIConnection
    config_specs: list = None

    @classmethod
    def _from_rest_object(cls, rest_obj: RestWorkspaceConnectionSpec):
        """Build a WorkspaceConnectionSpec (and its nested config specs) from the REST object."""
        rest_config_specs = rest_obj.config_specs or []
        return cls(
            config_specs=[ConnectionConfigSpec._from_rest_object(spec) for spec in rest_config_specs],
            module=rest_obj.module,
            connection_type=rest_obj.connection_type,
            flow_value_type=rest_obj.flow_value_type,
        )

    def _to_dict(self):
        """Serialize to a plain dict, omitting None-valued fields."""
        return asdict(self, dict_factory=_drop_none)
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_entities/_flow.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import copy
import os.path
from contextlib import contextmanager
from os import PathLike
from pathlib import Path
from typing import Dict, List, Optional, Union

import pydash

from promptflow._sdk._constants import DAG_FILE_NAME, SERVICE_FLOW_TYPE_2_CLIENT_FLOW_TYPE, AzureFlowSource, FlowType
from promptflow.azure._ml import AdditionalIncludesMixin, Code

from ..._sdk._utils import PromptflowIgnoreFile, load_yaml, remove_empty_element_from_dict
from ..._utils.flow_utils import dump_flow_dag, load_flow_dag
from ..._utils.logger_utils import LoggerFactory
from .._constants._flow import ADDITIONAL_INCLUDES, DEFAULT_STORAGE, ENVIRONMENT, PYTHON_REQUIREMENTS_TXT
from .._restclient.flow.models import FlowDto

# pylint: disable=redefined-builtin, unused-argument, f-string-without-interpolation

logger = LoggerFactory.get_logger(__name__)


class Flow(AdditionalIncludesMixin):
    """Azure flow entity.

    A flow can originate from a local snapshot (``AzureFlowSource.LOCAL``), the
    PF service (``AzureFlowSource.PF_SERVICE``) or the index service
    (``AzureFlowSource.INDEX``); the source determines how ``code`` is resolved.
    """

    # File name looked up in the flow folder and injected into the dag's environment section.
    DEFAULT_REQUIREMENTS_FILE_NAME = "requirements.txt"

    def __init__(
        self,
        path: Union[str, PathLike],
        name: Optional[str] = None,
        type: Optional[str] = None,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        **kwargs,
    ):
        """Initialize a flow entity.

        :param path: Path to the flow dag file (or its folder) for local flows,
            or the service-side definition file path otherwise.
        :param name: Flow name; defaults are derived from the snapshot folder for local flows.
        :param type: Flow type; defaults to FlowType.STANDARD.
        :param description: Flow description; falls back to the dag's description for local flows.
        :param tags: Flow tags; fall back to the dag's tags for local flows.
        :param kwargs: Source-specific extras (flow_source, display_name, owner,
            is_archived, created_date, flow_portal_url, flow_resource_id, entity_id).
        """
        self._flow_source = kwargs.pop("flow_source", AzureFlowSource.LOCAL)
        self.path = path
        self.name = name
        self.type = type or FlowType.STANDARD
        self.display_name = kwargs.get("display_name", None) or name
        self.description = description
        self.tags = tags
        self.owner = kwargs.get("owner", None)
        self.is_archived = kwargs.get("is_archived", None)
        self.created_date = kwargs.get("created_date", None)
        self.flow_portal_url = kwargs.get("flow_portal_url", None)

        if self._flow_source == AzureFlowSource.LOCAL:
            absolute_path = self._validate_flow_from_source(path)
            # flow snapshot folder
            self.code = absolute_path.parent.as_posix()
            self._code_uploaded = False
            self.path = absolute_path.name
            self._flow_dict = self._load_flow_yaml(absolute_path)
            self.display_name = self.display_name or absolute_path.parent.name
            self.description = description or self._flow_dict.get("description", None)
            self.tags = tags or self._flow_dict.get("tags", None)
        elif self._flow_source == AzureFlowSource.PF_SERVICE:
            self.code = kwargs.get("flow_resource_id", None)
        elif self._flow_source == AzureFlowSource.INDEX:
            self.code = kwargs.get("entity_id", None)

    def _validate_flow_from_source(self, source: Union[str, PathLike]) -> Path:
        """Validate flow from source.

        :param source: The source of the flow.
        :type source: Union[str, PathLike]
        :return: Absolute path of the flow dag file.
        :raises ValueError: If the dag file does not exist.
        """
        absolute_path = Path(source).resolve().absolute()
        if absolute_path.is_dir():
            # a folder was given: the dag file is expected directly inside it
            absolute_path = absolute_path / DAG_FILE_NAME
        if not absolute_path.exists():
            raise ValueError(f"Flow file {absolute_path.as_posix()} does not exist.")
        return absolute_path

    def _load_flow_yaml(self, path: Union[str, Path]) -> Dict:
        """Load flow yaml file.

        :param path: The path of the flow yaml file.
        :type path: str
        :return: The parsed dag as a dict.
        """
        return load_yaml(path)

    @classmethod
    def _resolve_requirements(cls, flow_path: Union[str, Path], flow_dag: dict):
        """If requirements.txt exists, add it to the flow snapshot. Return True if flow_dag is updated."""
        flow_dir = Path(flow_path)
        if not (flow_dir / cls.DEFAULT_REQUIREMENTS_FILE_NAME).exists():
            # no requirements.txt in the flow folder, nothing to inject
            return False
        if pydash.get(flow_dag, f"{ENVIRONMENT}.{PYTHON_REQUIREMENTS_TXT}"):
            # dag already declares a requirements file, keep the author's choice
            return False
        # Fix: log via flow_dir (always a Path); the previous flow_path.resolve()
        # raised AttributeError when a plain str was passed.
        logger.debug(
            f"requirements.txt is found in the flow folder: {flow_dir.resolve().as_posix()}, "
            "adding it to flow.dag.yaml."
        )
        pydash.set_(flow_dag, f"{ENVIRONMENT}.{PYTHON_REQUIREMENTS_TXT}", cls.DEFAULT_REQUIREMENTS_FILE_NAME)
        return True

    @classmethod
    def _remove_additional_includes(cls, flow_dag: dict):
        """Remove additional includes from flow dag. Return True if removed."""
        if ADDITIONAL_INCLUDES not in flow_dag:
            return False
        logger.debug("Additional includes are found in the flow dag, removing them from flow.dag.yaml after resolved.")
        flow_dag.pop(ADDITIONAL_INCLUDES, None)
        return True

    # region AdditionalIncludesMixin

    @contextmanager
    def _try_build_local_code(self) -> Optional[Code]:
        """Try to create a Code object pointing to local code and yield it.

        If there is no local code to upload, yield None. Otherwise, yield a Code object
        pointing to the code. The dag file may be temporarily rewritten (additional
        includes stripped, requirements injected); the original content is restored
        on exit.
        """
        with super()._try_build_local_code() as code:
            dag_updated = False
            if isinstance(code, Code):
                flow_dir = Path(code.path)
                _, flow_dag = load_flow_dag(flow_path=flow_dir)
                original_flow_dag = copy.deepcopy(flow_dag)
                if self._get_all_additional_includes_configs():
                    # Remove additional include in the flow yaml.
                    dag_updated = self._remove_additional_includes(flow_dag)
                # promptflow snapshot has specific ignore logic, like it should ignore `.run` by default
                code._ignore_file = PromptflowIgnoreFile(flow_dir)
                # promptflow snapshot will always be uploaded to default storage
                code.datastore = DEFAULT_STORAGE
                dag_updated = self._resolve_requirements(flow_dir, flow_dag) or dag_updated
                if dag_updated:
                    dump_flow_dag(flow_dag, flow_dir)
            try:
                yield code
            finally:
                if dag_updated:
                    # restore the user's original dag content after upload
                    dump_flow_dag(original_flow_dag, flow_dir)

    def _get_base_path_for_code(self) -> Path:
        """Get base path for additional includes."""
        # note that self.code is an absolute path, so it is safe to use it as base path
        return Path(self.code)

    def _get_all_additional_includes_configs(self) -> List:
        """Get all additional include configs.

        For flow, its additional include need to be read from dag with a helper function.
        """
        from promptflow._sdk._utils import _get_additional_includes

        return _get_additional_includes(os.path.join(self.code, self.path))

    # endregion

    @classmethod
    def _from_pf_service(cls, rest_object: FlowDto):
        """Build a Flow entity from a PF service FlowDto."""
        return cls(
            flow_source=AzureFlowSource.PF_SERVICE,
            path=rest_object.flow_definition_file_path,
            name=rest_object.flow_id,
            type=SERVICE_FLOW_TYPE_2_CLIENT_FLOW_TYPE[str(rest_object.flow_type).lower()],
            description=rest_object.description,
            tags=rest_object.tags,
            display_name=rest_object.flow_name,
            flow_resource_id=rest_object.flow_resource_id,
            owner=rest_object.owner.as_dict(),
            is_archived=rest_object.is_archived,
            created_date=rest_object.created_date,
            flow_portal_url=rest_object.studio_portal_endpoint,
        )

    @classmethod
    def _from_index_service(cls, rest_object: Dict):
        """Build a Flow entity from an index service entity dict."""
        properties = rest_object["properties"]
        annotations = rest_object["annotations"]

        # Fix: "flowType" may be absent — calling .lower() on None raised AttributeError.
        flow_type = properties.get("flowType", None)
        flow_type = flow_type.lower() if flow_type else None
        # rag type flow is shown as standard flow in UX, not sure why this type exists in service code
        if flow_type == "rag":
            flow_type = FlowType.STANDARD
        elif flow_type:
            flow_type = SERVICE_FLOW_TYPE_2_CLIENT_FLOW_TYPE[flow_type]

        return cls(
            flow_source=AzureFlowSource.INDEX,
            path=properties.get("flowDefinitionFilePath", None),
            name=properties.get("flowId", None),
            display_name=annotations.get("flowName", None),
            type=flow_type,
            description=annotations.get("description", None),
            tags=annotations.get("tags", None),
            entity_id=rest_object["entityId"],
            owner=annotations.get("owner", None),
            is_archived=annotations.get("isArchived", None),
            created_date=annotations.get("createdDate", None),
        )

    def _to_dict(self):
        """Serialize the entity to a dict, dropping empty elements."""
        result = {
            "name": self.name,
            "type": self.type,
            "description": self.description,
            "tags": self.tags,
            "path": self.path,
            "code": str(self.code),
            "display_name": self.display_name,
            "owner": self.owner,
            "is_archived": self.is_archived,
            "created_date": str(self.created_date),
            "flow_portal_url": self.flow_portal_url,
        }
        return remove_empty_element_from_dict(result)
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_entities/__init__.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/resources/component_spec_template.yaml
$schema: https://azuremlschemas.azureedge.net/latest/commandComponent.schema.json # will be changed to flow to support parallelism type: command outputs: output: # PRS team will always aggregate all the outputs into a single file under this folder for now type: uri_folder
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_utils/gerneral.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import jwt

from promptflow.exceptions import ValidationException


def is_arm_id(obj) -> bool:
    """Return True when *obj* is an ARM-style resource string ("azureml://...")."""
    return isinstance(obj, str) and obj.startswith("azureml://")


def get_token(credential, resource) -> str:
    """Fetch an AAD token for *resource* and validate its audience claim.

    :param credential: Credential object exposing ``get_token``.
    :param resource: Resource id the token must be scoped to.
    :raises ValidationException: If the decoded token's ``aud`` claim does not
        match *resource*.
    :return: The raw bearer token string.
    """
    from azure.ai.ml._azure_environments import _resource_to_scopes

    scopes = _resource_to_scopes(resource)
    access_token = credential.get_token(*scopes).token
    # Decode without verification purely to inspect the audience claim.
    claims = jwt.decode(
        access_token,
        options={"verify_signature": False, "verify_aud": False},
    )
    if claims.get("aud") == resource:
        return access_token
    msg = """AAD token with aml scope could not be fetched using the credentials being used.
        Please validate if token with {0} scope can be fetched using credentials provided to PFClient.
        Token with {0} scope can be fetched using credentials.get_token({0})
        """
    raise ValidationException(
        message=msg.format(*scopes),
    )


def get_aml_token(credential) -> str:
    """Return a token scoped to the Azure ML resource."""
    from azure.ai.ml._azure_environments import _get_aml_resource_id_from_metadata

    return get_token(credential, _get_aml_resource_id_from_metadata())


def get_arm_token(credential) -> str:
    """Return a token scoped to the ARM base resource."""
    from azure.ai.ml._azure_environments import _get_base_url_from_metadata

    return get_token(credential, _get_base_url_from_metadata())


def get_authorization(credential=None) -> str:
    """Return an HTTP ``Authorization`` header value built from an ARM token."""
    return "Bearer " + get_arm_token(credential=credential)


def get_user_alias_from_credential(credential):
    """Best-effort extraction of the signed-in user's alias from token claims.

    Falls back to the ``oid`` claim (e.g. for service principals, which carry
    no ``upn``/``email``).
    """
    claims = jwt.decode(
        get_arm_token(credential=credential),
        options={"verify_signature": False, "verify_aud": False},
    )
    try:
        email = claims.get("upn", claims.get("email", None))
        return email.split("@")[0]
    except Exception:
        # use oid when failed to get upn, e.g. service principal
        return claims["oid"]
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_utils/_url_utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import re


class BulkRunURL:
    """Parser for a flow run URL."""

    REGEX_PATTERN = ".*prompts/flow/([^/]+)/([^/]+)/bulktest/([^/]+).*"
    RUN_URL_FORMAT = (
        "https://ml.azure.com/prompts/flow/{}/{}/bulktest/{}/details?wsid="
        "/subscriptions/{}/resourcegroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}"
    )

    def __init__(self, url: str):
        """Parse experiment/flow/bulk-test ids out of a studio bulk-test URL.

        :param url: The studio URL of a bulk test run. Falsy values are ignored.
        :raises ValueError: If the URL does not match the expected pattern.
        """
        if url:
            match = re.match(self.REGEX_PATTERN, url)
            if match:
                self.experiment_id = match.group(1)
                self.flow_id = match.group(2)
                self.bulk_test_id = match.group(3)
            else:
                raise ValueError("Invalid flow run URL: {}".format(url))

    @classmethod
    def get_url(cls, experiment_id, flow_id, bulk_test_id, subscription_id, resource_group, workspace_name):
        """Build a studio bulk-test URL from its components."""
        return cls.RUN_URL_FORMAT.format(
            experiment_id, flow_id, bulk_test_id, subscription_id, resource_group, workspace_name
        )


class BulkRunId:
    """Parser for a flow run ID."""

    REGEX_PATTERN = "azureml://experiment/([^/]+)/flow/([^/]+)/bulktest/([^/]+)(/run/[^/]+)?"
    RUN_ID_FORMAT = "azureml://experiment/{}/flow/{}/bulktest/{}"

    def __init__(self, arm_id: str):
        """Parse an ``azureml://`` bulk-test id, with an optional trailing run id.

        :param arm_id: The id string. Falsy values are ignored.
        :raises ValueError: If the id does not match the expected pattern.
        """
        if arm_id:
            match = re.match(self.REGEX_PATTERN, arm_id)
            if match:
                self.experiment_id = match.group(1)
                self.flow_id = match.group(2)
                self.bulk_test_id = match.group(3)
                # BUGFIX: match.groups() always has 4 entries here (the optional
                # group is None when absent), so the original
                # `len(match.groups()) > 3` check was always true and
                # `match.group(4).split(...)` raised AttributeError for ids
                # without a "/run/<id>" suffix. Check the group's value instead.
                run_part = match.group(4)
                self.run_id = run_part.split("/")[-1].strip() if run_part else None
            else:
                raise ValueError("Invalid flow run ID: {}".format(arm_id))

    @classmethod
    def get_url(cls, experiment_id, flow_id, bulk_test_id, *, run_id=None):
        """Build an ``azureml://`` bulk-test id, appending ``/run/<id>`` when given."""
        arm_id = cls.RUN_ID_FORMAT.format(experiment_id, flow_id, bulk_test_id)
        if run_id:
            arm_id += "/run/{}".format(run_id)
        return arm_id
0
promptflow_repo/promptflow/src/promptflow/promptflow/azure
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_utils/__init__.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore from .gerneral import is_arm_id __all__ = ["is_arm_id"]
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/logger_utils.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- # This file is for open source, # so it should not contain any dependency on azure or azureml-related packages. import json import logging import os import sys from contextvars import ContextVar from dataclasses import dataclass from functools import partial from typing import List, Optional from promptflow._constants import PF_LOGGING_LEVEL from promptflow._utils.credential_scrubber import CredentialScrubber from promptflow._utils.exception_utils import ExceptionPresenter from promptflow.contracts.run_mode import RunMode # The maximum length of logger name is 18 ("promptflow-runtime"). # The maximum digit length of process id is 5. Fix the field width to 7. # So fix the length of these fields in the formatter. # May need to change if logger name/process id length changes. LOG_FORMAT = "%(asctime)s %(process)7d %(name)-18s %(levelname)-8s %(message)s" DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S %z" class CredentialScrubberFormatter(logging.Formatter): """Formatter that scrubs credentials in logs.""" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._default_scrubber = CredentialScrubber() self._context_var = ContextVar("credential_scrubber", default=None) @property def credential_scrubber(self): credential_scrubber = self._context_var.get() if credential_scrubber: return credential_scrubber return self._default_scrubber def set_credential_list(self, credential_list: List[str]): """Set credential list, which will be scrubbed in logs.""" credential_scrubber = CredentialScrubber() for c in credential_list: credential_scrubber.add_str(c) self._context_var.set(credential_scrubber) def clear(self): """Clear context variable.""" self._context_var.set(None) def format(self, record): """Override logging.Formatter's format method and remove credentials from log.""" s: str = 
super().format(record) s = self._handle_traceback(s, record) s = self._handle_customer_content(s, record) return self.credential_scrubber.scrub(s) def _handle_customer_content(self, s: str, record: logging.LogRecord) -> str: """Handle customer content in log message. Derived class can override this method to handle customer content in log. """ # If log record does not have "customer_content" field, return input logging string directly. if not hasattr(record, "customer_content"): return s customer_content = record.customer_content if isinstance(customer_content, Exception): # If customer_content is an exception, convert it to string. customer_str = self._convert_exception_to_str(customer_content) elif isinstance(customer_content, str): customer_str = customer_content else: customer_str = str(customer_content) return s.replace("{customer_content}", customer_str) def _handle_traceback(self, s: str, record: logging.LogRecord) -> str: """Interface method for handling traceback in log message. Derived class can override this method to handle traceback in log. """ return s def _convert_exception_to_str(self, ex: Exception) -> str: """Convert exception a user-friendly string.""" try: return json.dumps(ExceptionPresenter.create(ex).to_dict(include_debug_info=True), indent=2) except: # noqa: E722 return str(ex) class FileHandler: """Write compliant log to a file.""" def __init__(self, file_path: str, formatter: Optional[logging.Formatter] = None): self._stream_handler = self._get_stream_handler(file_path) if formatter is None: # Default formatter to scrub credentials in log message, exception and stack trace. 
self._formatter = CredentialScrubberFormatter(fmt=LOG_FORMAT, datefmt=DATETIME_FORMAT) else: self._formatter = formatter self._stream_handler.setFormatter(self._formatter) def set_credential_list(self, credential_list: List[str]): """Set credential list, which will be scrubbed in logs.""" self._formatter.set_credential_list(credential_list) def emit(self, record: logging.LogRecord): """Write logs.""" self._stream_handler.emit(record) def close(self): """Close stream handler.""" self._stream_handler.close() self._formatter.clear() def _get_stream_handler(self, file_path) -> logging.StreamHandler: """This method can be overridden by derived class to save log file in cloud.""" return logging.FileHandler(file_path, encoding="UTF-8") class FileHandlerConcurrentWrapper(logging.Handler): """Wrap context-local FileHandler instance for thread safety. A logger instance can write different log to different files in different contexts. """ def __init__(self): super().__init__() self._context_var = ContextVar("handler", default=None) @property def handler(self) -> FileHandler: return self._context_var.get() @handler.setter def handler(self, handler: FileHandler): self._context_var.set(handler) def emit(self, record: logging.LogRecord): """Override logging.Handler's emit method. Get inner file handler in current context and write log. """ stream_handler: FileHandler = self._context_var.get() if stream_handler is None: return stream_handler.emit(record) def clear(self): """Close file handler and clear context variable.""" handler: FileHandler = self._context_var.get() if handler: try: handler.close() except: # NOQA: E722 # Do nothing if handler close failed. pass self._context_var.set(None) valid_logging_level = {"CRITICAL", "FATAL", "ERROR", "WARN", "WARNING", "INFO", "DEBUG", "NOTSET"} def get_pf_logging_level(default=logging.INFO): logging_level = os.environ.get(PF_LOGGING_LEVEL, None) if logging_level not in valid_logging_level: # Fall back to info if user input is invalid. 
logging_level = default return logging_level def get_logger(name: str) -> logging.Logger: """Get logger used during execution.""" logger = logging.Logger(name) logger.setLevel(get_pf_logging_level()) logger.addHandler(FileHandlerConcurrentWrapper()) stdout_handler = logging.StreamHandler(sys.stdout) stdout_handler.setFormatter(CredentialScrubberFormatter(fmt=LOG_FORMAT, datefmt=DATETIME_FORMAT)) logger.addHandler(stdout_handler) return logger # Logs by flow_logger will only be shown in flow mode. # These logs should contain all detailed logs from executor and runtime. flow_logger = get_logger("execution.flow") # Logs by bulk_logger will only be shown in bulktest and eval modes. # These logs should contain overall progress logs and error logs. bulk_logger = get_logger("execution.bulk") # Logs by logger will be shown in all the modes above, # such as error logs. logger = get_logger("execution") logger_contexts = [] @dataclass class LogContext: """A context manager to setup logger context for input_logger, logger, flow_logger and bulk_logger.""" file_path: str # Log file path. run_mode: Optional[RunMode] = RunMode.Test credential_list: Optional[List[str]] = None # These credentials will be scrubbed in logs. input_logger: logging.Logger = None # If set, then context will also be set for input_logger. 
def get_initializer(self): return partial( LogContext, file_path=self.file_path, run_mode=self.run_mode, credential_list=self.credential_list ) @staticmethod def get_current() -> Optional["LogContext"]: global logger_contexts if logger_contexts: return logger_contexts[-1] return None @staticmethod def set_current(context: "LogContext"): global logger_contexts if isinstance(context, LogContext): logger_contexts.append(context) @staticmethod def clear_current(): global logger_contexts if logger_contexts: logger_contexts.pop() def __enter__(self): self._set_log_path() self._set_credential_list() LogContext.set_current(self) def __exit__(self, *args): """Clear context-local variables.""" all_logger_list = [logger, flow_logger, bulk_logger] if self.input_logger: all_logger_list.append(self.input_logger) for logger_ in all_logger_list: for handler in logger_.handlers: if isinstance(handler, FileHandlerConcurrentWrapper): handler.clear() elif isinstance(handler.formatter, CredentialScrubberFormatter): handler.formatter.clear() LogContext.clear_current() def _set_log_path(self): if not self.file_path: return logger_list = self._get_loggers_to_set_path() for logger_ in logger_list: for log_handler in logger_.handlers: if isinstance(log_handler, FileHandlerConcurrentWrapper): handler = FileHandler(self.file_path) log_handler.handler = handler def _set_credential_list(self): # Set credential list to all loggers. 
all_logger_list = self._get_execute_loggers_list() if self.input_logger: all_logger_list.append(self.input_logger) credential_list = self.credential_list or [] for logger_ in all_logger_list: for handler in logger_.handlers: if isinstance(handler, FileHandlerConcurrentWrapper) and handler.handler: handler.handler.set_credential_list(credential_list) elif isinstance(handler.formatter, CredentialScrubberFormatter): handler.formatter.set_credential_list(credential_list) def _get_loggers_to_set_path(self) -> List[logging.Logger]: logger_list = [logger] if self.input_logger: logger_list.append(self.input_logger) # For Batch run mode, set log path for bulk_logger, # otherwise for flow_logger. if self.run_mode == RunMode.Batch: logger_list.append(bulk_logger) else: logger_list.append(flow_logger) return logger_list @classmethod def _get_execute_loggers_list(cls) -> List[logging.Logger]: # return all loggers for executor return [logger, flow_logger, bulk_logger] def update_log_path(log_path: str, input_logger: logging.Logger = None): logger_list = [logger, bulk_logger, flow_logger] if input_logger: logger_list.append(input_logger) for logger_ in logger_list: update_single_log_path(log_path, logger_) def update_single_log_path(log_path: str, logger_: logging.Logger): for wrapper in logger_.handlers: if isinstance(wrapper, FileHandlerConcurrentWrapper): handler: FileHandler = wrapper.handler if handler: wrapper.handler = type(handler)(log_path, handler._formatter) def scrub_credentials(s: str): """Scrub credentials in string s. 
For example, for input string: "print accountkey=accountKey", the output will be: "print accountkey=**data_scrubbed**" """ for h in logger.handlers: if isinstance(h, FileHandlerConcurrentWrapper): if h.handler and h.handler._formatter: credential_scrubber = h.handler._formatter.credential_scrubber if credential_scrubber: return credential_scrubber.scrub(s) return CredentialScrubber().scrub(s) class LoggerFactory: @staticmethod def get_logger(name: str, verbosity: int = logging.INFO, target_stdout: bool = False): logger = logging.getLogger(name) logger.propagate = False # Set default logger level to debug, we are using handler level to control log by default logger.setLevel(logging.DEBUG) # Use env var at first, then use verbosity verbosity = get_pf_logging_level(default=None) or verbosity if not LoggerFactory._find_handler(logger, logging.StreamHandler): LoggerFactory._add_handler(logger, verbosity, target_stdout) # TODO: Find a more elegant way to set the logging level for azure.core.pipeline.policies._universal azure_logger = logging.getLogger("azure.core.pipeline.policies._universal") azure_logger.setLevel(logging.DEBUG) LoggerFactory._add_handler(azure_logger, logging.DEBUG, target_stdout) return logger @staticmethod def _find_handler(logger: logging.Logger, handler_type: type) -> Optional[logging.Handler]: for log_handler in logger.handlers: if isinstance(log_handler, handler_type): return log_handler return None @staticmethod def _add_handler(logger: logging.Logger, verbosity: int, target_stdout: bool = False) -> None: # set target_stdout=True can log data into sys.stdout instead of default sys.stderr, in this way # logger info and python print result can be synchronized handler = logging.StreamHandler(stream=sys.stdout) if target_stdout else logging.StreamHandler() formatter = logging.Formatter("[%(asctime)s][%(name)s][%(levelname)s] - %(message)s") handler.setFormatter(formatter) handler.setLevel(verbosity) logger.addHandler(handler) def 
get_cli_sdk_logger(): """Get logger used by CLI SDK.""" # cli sdk logger default logging level is WARNING # here the logger name "promptflow" is from promptflow._sdk._constants.LOGGER_NAME, # to avoid circular import error, use plain string here instead of importing from _constants # because this function is also called in _prepare_home_dir which is in _constants return LoggerFactory.get_logger("promptflow", verbosity=logging.WARNING)
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/_errors.py
from promptflow.exceptions import SystemErrorException, UserErrorException, ValidationException


class InvalidImageInput(ValidationException):
    """Exception raised when an image input is malformed or cannot be resolved
    (e.g. bad multimedia dict, invalid base64, failing image URL)."""

    pass


class LoadMultimediaDataError(UserErrorException):
    """Exception raised when loading multimedia data fails."""

    pass


class YamlParseError(SystemErrorException):
    """Exception raised when yaml parse failed."""

    pass
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/context_utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------

"""!!!Note: context in this file only used for command line related logics,
please avoid using them in service code!!!"""

import contextlib
import os
import sys


@contextlib.contextmanager
def _change_working_dir(path, mkdir=True):
    """Temporarily switch the process working directory to *path*.

    :param path: Target directory.
    :param mkdir: When True (default), create *path* (and parents) if missing.
    """
    previous_cwd = os.getcwd()
    if mkdir:
        os.makedirs(path, exist_ok=True)
    os.chdir(str(path))
    try:
        yield
    finally:
        # Always restore the original working directory, even on error.
        os.chdir(previous_cwd)


@contextlib.contextmanager
def inject_sys_path(path):
    """Temporarily prepend *path* to ``sys.path``; restore the snapshot on exit."""
    snapshot = sys.path.copy()
    sys.path.insert(0, str(path))
    try:
        yield
    finally:
        sys.path = snapshot
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/execution_utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from typing import AbstractSet, Any, Dict, List, Mapping

from promptflow._utils.logger_utils import logger
from promptflow.contracts.flow import Flow, FlowInputDefinition, InputValueType
from promptflow.contracts.run_info import FlowRunInfo, Status


def apply_default_value_for_input(inputs: Dict[str, FlowInputDefinition], line_inputs: Mapping) -> Dict[str, Any]:
    """Return *line_inputs* extended with declared defaults for any missing keys."""
    resolved = dict(line_inputs or {})
    for name, definition in inputs.items():
        if name in resolved:
            continue
        # Only fill in inputs that actually declare a default value.
        if definition and definition.default is not None:
            resolved[name] = definition.default
    return resolved


def handle_line_failures(run_infos: List[FlowRunInfo], raise_on_line_failure: bool = False):
    """Handle line failures in batch run"""
    failed = [idx for idx, info in enumerate(run_infos) if info.status == Status.Failed]
    if not failed:
        return
    first_fail_exception = run_infos[failed[0]].error["message"]
    if raise_on_line_failure:
        raise Exception("Flow run failed due to the error: " + first_fail_exception)
    # Otherwise just log a summary of how many lines failed and the first error.
    logger.error(
        f"{len(failed)}/{len(run_infos)} flow run failed, "
        f"indexes: [{','.join(str(i) for i in failed)}],"
        f" exception of index {failed[0]}: {first_fail_exception}"
    )


def get_aggregation_inputs_properties(flow: Flow) -> AbstractSet[str]:
    """Return the serialized InputAssignment of the aggregation nodes inputs.

    For example, an aggregation node refers the outputs of a node named "grade",
    then this function will return set("${grade.output}").
    """
    normal_nodes = {node.name for node in flow.nodes if flow.is_normal_node(node.name)}
    serialized_refs = set()
    # Aggregation nodes are exactly the nodes NOT in the normal-node set.
    for node in flow.nodes:
        if node.name in normal_nodes:
            continue
        for assignment in node.inputs.values():
            if assignment.value_type == InputValueType.NODE_REFERENCE and assignment.value in normal_nodes:
                serialized_refs.add(assignment.serialize())
    return serialized_refs


def collect_lines(indexes: List[int], kvs: Mapping[str, List]) -> Mapping[str, List]:
    """Collect the values from the kvs according to the indexes."""
    selected = {}
    for key, values in kvs.items():
        selected[key] = [values[i] for i in indexes]
    return selected
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/feature_utils.py
from dataclasses import dataclass
from enum import Enum
from typing import Optional


class FeatureState(Enum):
    """The enum of feature state.

    READY: The feature is ready to use.
    E2ETEST: The feature is not ready to be shipped to customer and is in e2e testing.
    """

    READY = "Ready"
    E2ETEST = "E2ETest"


@dataclass
class Feature:
    """The dataclass of feature."""

    name: str
    description: str
    state: FeatureState
    component: Optional[str] = "executor"


def get_feature_list():
    """Return the static catalogue of features with their rollout state."""
    ready = FeatureState.READY
    return [
        Feature(
            name="ActivateConfig",
            description="Bypass node execution when the node does not meet activate condition.",
            state=ready,
        ),
        Feature(
            name="Image",
            description="Support image input and output.",
            state=ready,
        ),
        Feature(
            name="EnvironmentVariablesInYaml",
            description="Support environment variables in flow.dag.yaml.",
            state=ready,
        ),
    ]
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/flow_utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import hashlib
import os
from os import PathLike
from pathlib import Path
from typing import Union

from promptflow._sdk._constants import DAG_FILE_NAME, DEFAULT_ENCODING
from promptflow._utils.logger_utils import LoggerFactory
from promptflow._utils.yaml_utils import dump_yaml, load_yaml

logger = LoggerFactory.get_logger(name=__name__)


def get_flow_lineage_id(flow_dir: Union[str, PathLike]):
    """
    Get the lineage id for flow. The flow lineage id will be same for same flow in same GIT repo or device.

    If the flow locates in GIT repo:
        use Repo name + relative path to flow_dir as session id
    Otherwise:
        use device id + absolute path to flow_dir as session id
    :param flow_dir: flow directory
    """
    flow_dir = Path(flow_dir).resolve()
    if not flow_dir.is_dir():
        flow_dir = flow_dir.parent
    try:
        from git import Repo

        repo = Repo(flow_dir, search_parent_directories=True)
        lineage_id = f"{os.path.basename(repo.working_dir)}/{flow_dir.relative_to(repo.working_dir).as_posix()}"
        logger.debug("Got lineage id %s from git repo.", lineage_id)
    except Exception:
        # No git repo (or gitpython unavailable): fall back to device id + path.
        import uuid

        device_id = uuid.getnode()
        lineage_id = f"{device_id}/{flow_dir.absolute().as_posix()}"
        logger.debug("Got lineage id %s from local since failed to get git info.", lineage_id)

    # hash the value to avoid it gets too long, and it's not user visible.
    return hashlib.sha256(lineage_id.encode()).hexdigest()


def resolve_flow_path(flow_path: Path):
    """Resolve given flow path to dag file path."""
    return flow_path / DAG_FILE_NAME if flow_path.is_dir() else flow_path


def load_flow_dag(flow_path: Path):
    """Load flow dag from given flow path."""
    dag_path = resolve_flow_path(flow_path)
    if not dag_path.exists():
        raise FileNotFoundError(f"Flow file {dag_path} not found")
    with open(dag_path, "r", encoding=DEFAULT_ENCODING) as f:
        return dag_path, load_yaml(f)


def dump_flow_dag(flow_dag: dict, flow_path: Path):
    """Dump flow dag to given flow path."""
    dag_path = resolve_flow_path(flow_path)
    with open(dag_path, "w", encoding=DEFAULT_ENCODING) as f:
        dump_yaml(flow_dag, f)
    return dag_path
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/thread_utils.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import contextvars import logging import threading from promptflow._utils.utils import set_context class RepeatLogTimer(threading.Timer): """Repeat to log message every interval seconds until it is cancelled.""" def __init__( self, interval_seconds: float, logger: logging.Logger, level: int, log_message_function, args: tuple = None ): self._logger = logger self._level = level self._log_message_function = log_message_function self._function_args = args if args else tuple() self._context = contextvars.copy_context() super().__init__(interval_seconds, function=None) def __enter__(self): self.start() return self def __exit__(self, *args): self.cancel() def run(self): """Override Timer.run method.""" # Set context variables from parent context. set_context(self._context) while not self.finished.wait(self.interval): if not self.finished.is_set(): msgs = self._log_message_function(*self._function_args) for msg in msgs: self._logger.log(self._level, msg) self.finished.set()
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/connection_utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import io
import re

from jinja2 import Template

from .yaml_utils import dump_yaml, load_yaml_string


def generate_custom_strong_type_connection_spec(cls, package, package_version):
    """Build the connection-spec dict for a custom strong type connection class.

    One entry is added to ``configSpecs`` per annotated attribute of *cls*;
    an attribute is marked optional when it has a non-None class-level default.
    """
    connection_spec = {
        "connectionCategory": "CustomKeys",
        "flowValueType": "CustomConnection",
        "connectionType": cls.__name__,
        "ConnectionTypeDisplayName": cls.__name__,
        "configSpecs": [],
        "module": cls.__module__,
        "package": package,
        "package_version": package_version,
    }

    for k, typ in cls.__annotations__.items():
        spec = {
            "name": k,
            "displayName": k.replace("_", " ").title(),
            "configValueType": typ.__name__,
        }
        if hasattr(cls, k):
            # Optional iff a non-None default is set on the class.
            spec["isOptional"] = getattr(cls, k, None) is not None
        else:
            spec["isOptional"] = False
        connection_spec["configSpecs"].append(spec)

    return connection_spec


def generate_custom_strong_type_connection_template(cls, connection_spec, package, package_version):
    """Render a YAML connection template (with placeholder values) for *cls*.

    Secrets always get a ``to_replace_with_<name>`` placeholder; configs use
    the class default when available, otherwise the same placeholder scheme.
    """
    connection_template_str = """
    $schema: https://azuremlschemas.azureedge.net/promptflow/latest/CustomStrongTypeConnection.schema.json
    name: "to_replace_with_connection_name"
    type: custom
    custom_type: {{ custom_type }}
    module: {{ module }}
    package: {{ package }}
    package_version: {{ package_version }}
    configs:{% for key, value in configs.items() %}
      {{ key }}: "{{ value -}}"{% endfor %}
    secrets: # must-have{% for key, value in secrets.items() %}
      {{ key }}: "{{ value -}}"{% endfor %}
    """
    connection_template = Template(connection_template_str)

    # Extract configs and secrets
    configs = {}
    secrets = {}
    for spec in connection_spec["configSpecs"]:
        if spec["configValueType"] == "Secret":
            secrets[spec["name"]] = "to_replace_with_" + spec["name"].replace("-", "_")
        else:
            configs[spec["name"]] = getattr(cls, spec["name"], None) or "to_replace_with_" + spec["name"].replace(
                "-", "_"
            )

    # Prepare data for template
    data = {
        "custom_type": cls.__name__,
        "module": cls.__module__,
        "package": package,
        "package_version": package_version,
        "configs": configs,
        "secrets": secrets,
    }

    connection_template_with_data = connection_template.render(data)
    # Attach end-of-line comments extracted from the class docstring.
    connection_template_with_comments = render_comments(
        connection_template_with_data, cls, secrets.keys(), configs.keys()
    )

    return connection_template_with_comments


def render_comments(connection_template, cls, secrets, configs):
    """Add docstring-derived EOL comments to the rendered YAML template.

    Returns the template unchanged when *cls* has no docstring. Relies on
    load_yaml_string returning a round-trippable object supporting
    ``yaml_add_eol_comment`` (ruamel-style) — the comments survive dump_yaml.
    """
    if cls.__doc__ is not None:
        data = load_yaml_string(connection_template)
        comments_map = extract_comments_mapping(list(secrets) + list(configs), cls.__doc__)
        # Add comments for secret keys
        for key in secrets:
            if key in comments_map.keys():
                data["secrets"].yaml_add_eol_comment(comments_map[key] + "\n", key)
        # Add comments for config keys
        for key in configs:
            if key in comments_map.keys():
                data["configs"].yaml_add_eol_comment(comments_map[key] + "\n", key)
        # Dump data object back to string
        buf = io.StringIO()
        dump_yaml(data, buf)
        connection_template_with_comments = buf.getvalue()

        return connection_template_with_comments

    return connection_template


def extract_comments_mapping(keys, doc):
    """Map each key to a comment built from its ``:param:``/``:type:`` lines in *doc*."""
    comments_map = {}
    for key in keys:
        try:
            param_pattern = rf":param {key}: (.*)"
            key_description = " ".join(re.findall(param_pattern, doc))
            type_pattern = rf":type {key}: (.*)"
            key_type = " ".join(re.findall(type_pattern, doc)).rstrip(".")
            # Combine whichever of type/description are present.
            if key_type and key_description:
                comments_map[key] = " ".join([key_type + " type.", key_description])
            elif key_type:
                comments_map[key] = key_type + " type."
            elif key_description:
                comments_map[key] = key_description
        except re.error:
            print("An error occurred when extract comments mapping.")

    return comments_map
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/multimedia_utils.py
import base64
import os
import re
import uuid
from functools import partial
from pathlib import Path
from typing import Any, Callable, Dict
from urllib.parse import urlparse

import requests

from promptflow._utils._errors import InvalidImageInput, LoadMultimediaDataError
from promptflow.contracts.flow import FlowInputDefinition
from promptflow.contracts.multimedia import Image, PFBytes
from promptflow.contracts.tool import ValueType
from promptflow.exceptions import ErrorTarget

# A multimedia reference key looks like "data:image/<format>;<resource>",
# where <resource> is one of path / base64 / url.
MIME_PATTERN = re.compile(r"^data:image/(.*);(path|base64|url)$")


def _get_extension_from_mime_type(mime_type: str):
    """Return the file extension for a mime type, or None for the wildcard "image/*"."""
    ext = mime_type.split("/")[-1]
    if ext == "*":
        return None
    return ext


def is_multimedia_dict(multimedia_dict: dict):
    """True when the dict has exactly one key and that key matches MIME_PATTERN."""
    if len(multimedia_dict) != 1:
        return False
    key = list(multimedia_dict.keys())[0]
    if re.match(MIME_PATTERN, key):
        return True
    return False


def _get_multimedia_info(key: str):
    """Return (format, resource) parsed from a multimedia key, or (None, None)."""
    match = re.match(MIME_PATTERN, key)
    if match:
        return match.group(1), match.group(2)
    return None, None


def _is_url(value: str):
    """True when the string parses as a URL with both scheme and netloc."""
    try:
        result = urlparse(value)
        return all([result.scheme, result.netloc])
    except ValueError:
        return False


def _is_base64(value: str):
    """True when the whole string is valid (possibly empty) standard base64."""
    base64_regex = re.compile(r"^([A-Za-z0-9+/]{4})*(([A-Za-z0-9+/]{2})*(==|[A-Za-z0-9+/]=)?)?$")
    if re.match(base64_regex, value):
        return True
    return False


def _create_image_from_file(f: Path, mime_type: str = None):
    """Read the file's bytes into an Image."""
    with open(f, "rb") as fin:
        return Image(fin.read(), mime_type=mime_type)


def _create_image_from_base64(base64_str: str, mime_type: str = None):
    """Decode a base64 payload into an Image."""
    image_bytes = base64.b64decode(base64_str)
    return Image(image_bytes, mime_type=mime_type)


def _create_image_from_url(url: str, mime_type: str = None):
    """Fetch the URL and wrap the response body in an Image; raise on non-200."""
    response = requests.get(url)
    if response.status_code == 200:
        return Image(response.content, mime_type=mime_type, source_url=url)
    else:
        raise InvalidImageInput(
            message_format="Failed to fetch image from URL: {url}. Error code: {error_code}. "
            "Error message: {error_message}.",
            target=ErrorTarget.EXECUTOR,
            url=url,
            error_code=response.status_code,
            error_message=response.text,
        )


def _create_image_from_dict(image_dict: dict):
    """Build an Image from a single-entry multimedia dict, dispatching on the resource kind.

    Returns on the first entry processed (multimedia dicts have exactly one entry).
    """
    for k, v in image_dict.items():
        # NOTE: "format" shadows the builtin; kept as-is to leave code unchanged.
        format, resource = _get_multimedia_info(k)
        if resource == "path":
            return _create_image_from_file(Path(v), mime_type=f"image/{format}")
        elif resource == "base64":
            if _is_base64(v):
                return _create_image_from_base64(v, mime_type=f"image/{format}")
            else:
                raise InvalidImageInput(
                    message_format=f"Invalid base64 image: {v}.",
                    target=ErrorTarget.EXECUTOR,
                )
        elif resource == "url":
            return _create_image_from_url(v, mime_type=f"image/{format}")
        else:
            raise InvalidImageInput(
                message_format=f"Unsupported image resource: {resource}. "
                "Supported Resources are [path, base64, url].",
                target=ErrorTarget.EXECUTOR,
            )


def _create_image_from_string(value: str):
    """Interpret a bare string as base64, then URL, then a local file path — in that order."""
    if _is_base64(value):
        return _create_image_from_base64(value)
    elif _is_url(value):
        return _create_image_from_url(value)
    else:
        return _create_image_from_file(Path(value))


def create_image(value: Any):
    """Coerce an arbitrary input (PFBytes / multimedia dict / string) into an Image.

    :raises InvalidImageInput: for empty strings, malformed dicts, or unsupported types.
    """
    if isinstance(value, PFBytes):
        return value
    elif isinstance(value, dict):
        if is_multimedia_dict(value):
            return _create_image_from_dict(value)
        else:
            raise InvalidImageInput(
                message_format="Invalid image input format. The image input should be a dictionary like: "
                "{{data:image/<image_type>;[path|base64|url]: <image_data>}}.",
                target=ErrorTarget.EXECUTOR,
            )
    elif isinstance(value, str):
        if not value:
            raise InvalidImageInput(
                message_format="The image input should not be empty.", target=ErrorTarget.EXECUTOR
            )
        return _create_image_from_string(value)
    else:
        raise InvalidImageInput(
            message_format=f"Unsupported image input type: {type(value)}. "
            "The image inputs should be a string or a dictionary.",
            target=ErrorTarget.EXECUTOR,
        )


def _save_image_to_file(
    image: Image, file_name: str, folder_path: Path, relative_path: Path = None, use_absolute_path=False
):
    """Write the image bytes under folder_path[/relative_path] and return its reference dict.

    The returned dict maps "data:<mime>;path" to the (relative or absolute) saved path.
    """
    ext = _get_extension_from_mime_type(image._mime_type)
    file_name = f"{file_name}.{ext}" if ext else file_name
    image_path = (relative_path / file_name).as_posix() if relative_path else file_name
    if use_absolute_path:
        image_path = Path(folder_path / image_path).resolve().as_posix()
    image_reference = {f"data:{image._mime_type};path": image_path}
    path = folder_path / relative_path if relative_path else folder_path
    os.makedirs(path, exist_ok=True)
    with open(os.path.join(path, file_name), "wb") as file:
        file.write(image)
    return image_reference


def get_file_reference_encoder(folder_path: Path, relative_path: Path = None, *, use_absolute_path=False) -> Callable:
    """Return an encoder callable that persists PFBytes to disk and yields a reference dict."""

    def pfbytes_file_reference_encoder(obj):
        """Dumps PFBytes to a file and returns its reference."""
        # NOTE(review): obj.source_url is read before the isinstance check — this
        # assumes every obj passed in has that attribute; confirm against callers.
        if obj.source_url:
            return {f"data:{obj._mime_type};url": obj.source_url}
        if isinstance(obj, PFBytes):
            file_name = str(uuid.uuid4())
            # If use_absolute_path is True, the image file path in image dictionary will be absolute path.
            return _save_image_to_file(obj, file_name, folder_path, relative_path, use_absolute_path)
        raise TypeError(f"Not supported to dump type '{type(obj).__name__}'.")

    return pfbytes_file_reference_encoder


def default_json_encoder(obj):
    """JSON ``default=`` hook: stringify PFBytes, reject everything else."""
    if isinstance(obj, PFBytes):
        return str(obj)
    else:
        raise TypeError(f"Object of type {type(obj).__name__} is not JSON serializable")


def persist_multimedia_data(value: Any, base_dir: Path, sub_dir: Path = None):
    """Recursively replace Image objects in ``value`` with on-disk reference dicts."""
    pfbytes_file_reference_encoder = get_file_reference_encoder(base_dir, sub_dir)
    serialization_funcs = {Image: partial(Image.serialize, **{"encoder": pfbytes_file_reference_encoder})}
    return _process_recursively(value, process_funcs=serialization_funcs)


def convert_multimedia_data_to_base64(value: Any, with_type=False, dict_type=False):
    """Recursively replace PFBytes in ``value`` with their base64 representation."""
    to_base64_funcs = {PFBytes: partial(PFBytes.to_base64, **{"with_type": with_type, "dict_type": dict_type})}
    return _process_recursively(value, process_funcs=to_base64_funcs)


# TODO: Move this function to a more general place and integrate serialization to this function.
def _process_recursively(value: Any, process_funcs: Dict[type, Callable] = None, inplace: bool = False) -> Any:
    """Walk lists/dicts, applying the first matching per-type function to each leaf.

    With inplace=True containers are mutated and the same object is returned;
    otherwise new containers are built.
    """
    if process_funcs:
        for cls, f in process_funcs.items():
            if isinstance(value, cls):
                return f(value)
    if isinstance(value, list):
        if inplace:
            for i in range(len(value)):
                value[i] = _process_recursively(value[i], process_funcs, inplace)
        else:
            return [_process_recursively(v, process_funcs, inplace) for v in value]
    elif isinstance(value, dict):
        if inplace:
            for k, v in value.items():
                value[k] = _process_recursively(v, process_funcs, inplace)
        else:
            return {k: _process_recursively(v, process_funcs, inplace) for k, v in value.items()}
    return value


def load_multimedia_data(inputs: Dict[str, FlowInputDefinition], line_inputs: dict):
    """Materialize Image objects for the IMAGE/LIST/OBJECT flow inputs of one line.

    :raises LoadMultimediaDataError: wrapping any failure, keyed by the input name.
    """
    updated_inputs = dict(line_inputs or {})
    for key, value in inputs.items():
        try:
            if value.type == ValueType.IMAGE:
                if isinstance(updated_inputs[key], list):
                    # For aggregation node, the image input is a list.
                    updated_inputs[key] = [create_image(item) for item in updated_inputs[key]]
                else:
                    updated_inputs[key] = create_image(updated_inputs[key])
            elif value.type == ValueType.LIST or value.type == ValueType.OBJECT:
                updated_inputs[key] = load_multimedia_data_recursively(updated_inputs[key])
        except Exception as ex:
            error_type_and_message = f"({ex.__class__.__name__}) {ex}"
            raise LoadMultimediaDataError(
                message_format="Failed to load image for input '{key}': {error_type_and_message}",
                key=key,
                error_type_and_message=error_type_and_message,
                target=ErrorTarget.EXECUTOR,
            ) from ex
    return updated_inputs


def load_multimedia_data_recursively(value: Any):
    """Recursively turn every embedded multimedia dict in ``value`` into an Image."""
    return _process_multimedia_dict_recursively(value, _create_image_from_dict)


def resolve_multimedia_data_recursively(input_dir: Path, value: Any):
    """Recursively rewrite relative image paths in ``value`` against ``input_dir``."""
    process_func = partial(resolve_image_path, **{"input_dir": input_dir})
    return _process_multimedia_dict_recursively(value, process_func)


def _process_multimedia_dict_recursively(value: Any, process_func: Callable) -> Any:
    """Walk lists/dicts and apply ``process_func`` to every multimedia dict found."""
    if isinstance(value, list):
        return [_process_multimedia_dict_recursively(item, process_func) for item in value]
    elif isinstance(value, dict):
        if is_multimedia_dict(value):
            return process_func(**{"image_dict": value})
        else:
            return {k: _process_multimedia_dict_recursively(v, process_func) for k, v in value.items()}
    else:
        return value


def resolve_image_path(input_dir: Path, image_dict: dict):
    """Resolve image path to absolute path in image dict.

    Mutates ``image_dict`` in place (path entries only) and returns it.
    """
    input_dir = input_dir.parent if input_dir.is_file() else input_dir
    if is_multimedia_dict(image_dict):
        for key in image_dict:
            _, resource = _get_multimedia_info(key)
            if resource == "path":
                image_dict[key] = str(input_dir / image_dict[key])
    return image_dict
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
"""This is a common util file.
!!!Please do not include any project related import.!!!
"""
import contextlib
import contextvars
import functools
import importlib
import json
import logging
import os
import re
import time
import traceback
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, Iterable, Iterator, List, Optional, TypeVar, Union

from promptflow._constants import DEFAULT_ENCODING

T = TypeVar("T")


class AttrDict(dict):
    """A dict whose keys are also readable as attributes: d.key == d["key"]."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def __getattr__(self, item):
        # Prefer dict entries; fall back to normal attribute lookup so real
        # attributes (and AttributeError semantics) still work.
        if item in self:
            return self.__getitem__(item)
        return super().__getattribute__(item)


def camel_to_snake(text: str) -> Optional[str]:
    """Convert CamelCase/mixedCase to snake_case (e.g. "MyHTTPRun" -> "my_http_run")."""
    text = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", text)
    return re.sub("([a-z0-9])([A-Z])", r"\1_\2", text).lower()


class DateTimeEncoder(json.JSONEncoder):
    """JSON encoder that serializes datetime objects as ISO-8601 strings."""

    def default(self, o):
        if isinstance(o, datetime):
            return o.isoformat()
        return json.JSONEncoder.default(self, o)


def is_json_serializable(value: Any) -> bool:
    """Return True when ``value`` can be dumped by the stock json encoder."""
    try:
        json.dumps(value)
        return True
    except TypeError:
        return False


def load_json(file_path: Union[str, Path]) -> dict:
    """Load a JSON file; an empty file yields {} instead of raising.

    JSON is specified as UTF-8, so read with an explicit encoding rather than
    the platform default.
    """
    if os.path.getsize(file_path) > 0:
        with open(file_path, "r", encoding=DEFAULT_ENCODING) as f:
            return json.load(f)
    return {}


def dump_list_to_jsonl(file_path: Union[str, Path], list_data: List[Dict]):
    """Write one JSON object per line (JSONL) to ``file_path``."""
    with open(file_path, "w", encoding=DEFAULT_ENCODING) as jsonl_file:
        for data in list_data:
            json.dump(data, jsonl_file, ensure_ascii=False)
            jsonl_file.write("\n")


def transpose(values: List[Dict[str, Any]], keys: Optional[List] = None) -> Dict[str, List]:
    """Turn a list of row-dicts into a dict of column-lists.

    Keys default to the first row's keys; missing keys become None via .get.
    """
    keys = keys or list(values[0].keys())
    return {key: [v.get(key) for v in values] for key in keys}


def reverse_transpose(values: Dict[str, List]) -> List[Dict[str, Any]]:
    """Turn a dict of equal-length column-lists back into a list of row-dicts.

    :raises Exception: when the column lists have differing lengths.
    """
    # Setup a result list same len with values
    value_lists = list(values.values())
    _len = len(value_lists[0])
    if any(len(value_list) != _len for value_list in value_lists):
        raise Exception(f"Value list of each key must have same length, please check {values!r}.")
    result = []
    for i in range(_len):
        result.append({})
    for key, vals in values.items():
        for _idx, val in enumerate(vals):
            result[_idx][key] = val
    return result


def deprecated(f=None, replace=None, version=None):
    """Decorator that logs a deprecation warning on every call.

    Usable bare (``@deprecated``) or with arguments
    (``@deprecated(replace="new_fn", version="1.2")``).
    """
    if f is None:
        # Called with arguments: return a decorator bound to them.
        return functools.partial(deprecated, replace=replace, version=version)

    msg = [f"Function {f.__qualname__!r} is deprecated."]

    if version:
        msg.append(f"Deprecated since version {version}.")
    if replace:
        msg.append(f"Use {replace!r} instead.")
    msg = " ".join(msg)

    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        logging.warning(msg)
        return f(*args, **kwargs)

    return wrapper


def try_import(module, error_message, raise_error=True):
    """Import ``module`` for its side effects; log (and optionally re-raise) on failure."""
    try:
        importlib.import_module(module)
    except ImportError as e:
        ex_message = f"{error_message} Root cause: {e!r}"
        logging.warning(ex_message)
        if raise_error:
            raise Exception(ex_message)


def is_in_ci_pipeline():
    """True when the IS_IN_CI_PIPELINE environment variable is exactly "true"."""
    if os.environ.get("IS_IN_CI_PIPELINE") == "true":
        return True
    return False


def count_and_log_progress(
    inputs: Iterable[T], logger: logging.Logger, total_count: int, formatter="{count} / {total_count} finished."
) -> Iterator[T]:
    """Yield items from ``inputs`` while logging progress roughly every 10%."""
    log_interval = max(int(total_count / 10), 1)
    count = 0
    for item in inputs:
        count += 1
        if count % log_interval == 0 or count == total_count:
            logger.info(formatter.format(count=count, total_count=total_count))

        yield item


def log_progress(
    run_start_time: datetime,
    logger: logging.Logger,
    count: int,
    total_count: int,
    formatter="Finished {count} / {total_count} lines.",
    *,
    last_log_count: Optional[int] = None,
):
    """Log line-run progress plus average/estimated execution times.

    The interval is 10% of total_count, capped at 10 lines, so small runs log
    every ~10% and large runs log every 10 lines.
    """
    log_interval = min(10, max(int(total_count / 10), 1))

    # If last_log_count is not None, determine whether to log based on whether the
    # difference between the current count and the previous count exceeds log_interval.
    # Otherwise, decide based on whether the current count is evenly divisible by log_interval.
    if last_log_count:
        log_flag = (count - last_log_count) >= log_interval
    else:
        log_flag = count % log_interval == 0

    if count > 0 and (log_flag or count == total_count):
        average_execution_time = round((datetime.utcnow().timestamp() - run_start_time.timestamp()) / count, 2)
        estimated_execution_time = round(average_execution_time * (total_count - count), 2)
        logger.info(formatter.format(count=count, total_count=total_count))
        logger.info(
            f"Average execution time for completed lines: {average_execution_time} seconds. "
            f"Estimated time for incomplete lines: {estimated_execution_time} seconds."
        )


def extract_user_frame_summaries(frame_summaries: List[traceback.FrameSummary]):
    """Drop leading promptflow-internal frames so the traceback starts at user code."""
    from promptflow import _core

    core_folder = os.path.dirname(_core.__file__)
    for i in range(len(frame_summaries) - 1):
        cur_file = frame_summaries[i].filename
        next_file = frame_summaries[i + 1].filename
        # If the current frame is in _core folder and the next frame is not in _core folder
        # then we can say that the next frame is in user code.
        if cur_file.startswith(core_folder) and not next_file.startswith(core_folder):
            return frame_summaries[i + 1 :]
    return frame_summaries


def format_user_stacktrace(frame):
    """Format ``frame``'s stack, trimmed to the user-code portion."""
    # TODO: Maybe we can filter all frames from our code base to make it clean?
    frame_summaries = traceback.extract_stack(frame)
    user_frame_summaries = extract_user_frame_summaries(frame_summaries)
    return traceback.format_list(user_frame_summaries)


def generate_elapsed_time_messages(func_name: str, start_time: float, interval: int, thread_id: int):
    """Build a "still running" message for ``func_name`` including the thread's stacktrace."""
    import sys

    frames = sys._current_frames()
    if thread_id not in frames:
        thread_msg = (
            f"thread {thread_id} cannot be found in sys._current_frames, "
            + "maybe it has been terminated due to unexpected errors."
        )
    else:
        frame = frames[thread_id]
        stack_msgs = format_user_stacktrace(frame)
        stack_msg = "".join(stack_msgs)
        thread_msg = f"stacktrace of thread {thread_id}:\n{stack_msg}"
    elapse_time = time.perf_counter() - start_time
    # Make elapse time a multiple of interval.
    elapse_time = round(elapse_time / interval) * interval
    msgs = [f"{func_name} has been running for {elapse_time:.0f} seconds, {thread_msg}"]
    return msgs


def set_context(context: contextvars.Context):
    """Copy every contextvar from ``context`` into the current context (e.g. a new thread)."""
    for var, value in context.items():
        var.set(value)


def convert_inputs_mapping_to_param(inputs_mapping: dict):
    """Use this function to convert inputs_mapping to a string that can be passed to component as a string parameter,
    we have to do this since we can't pass a dict as a parameter to component.
    # TODO: Finalize the format of inputs_mapping
    """
    return ",".join([f"{k}={v}" for k, v in inputs_mapping.items()])


@contextlib.contextmanager
def environment_variable_overwrite(key, val):
    """Temporarily set ``os.environ[key] = val``; restore the original state on exit.

    Fixes two defects in the previous implementation:
    - the backup was restored only when truthy, so an original value of ""
      (empty string) was deleted instead of restored;
    - ``os.environ.pop(key)`` raised KeyError if the variable had already been
      removed inside the context.
    """
    backup_value = os.environ.get(key)  # None means "was not set" (env values are str)
    os.environ[key] = val

    try:
        yield
    finally:
        if backup_value is not None:
            os.environ[key] = backup_value
        else:
            os.environ.pop(key, None)


def resolve_dir_to_absolute(base_dir: Union[str, Path], sub_dir: Union[str, Path]) -> Path:
    """Resolve directory to absolute path with base_dir as root"""
    path = sub_dir if isinstance(sub_dir, Path) else Path(sub_dir)
    if not path.is_absolute():
        base_dir = base_dir if isinstance(base_dir, Path) else Path(base_dir)
        path = base_dir / sub_dir
    return path


def parse_ua_to_dict(ua):
    """Parse string user agent to dict with name as ua name and value as ua version.

    Assumes each space-separated token is "name/version"; a token without "/"
    will raise ValueError, matching the original behavior.
    """
    ua_dict = {}
    ua_list = ua.split(" ")
    for item in ua_list:
        if item:
            key, value = item.split("/")
            ua_dict[key] = value
    return ua_dict


# TODO: Add "conditions" parameter to pass in a list of lambda functions
# to check if the environment variable is valid.
def get_int_env_var(env_var_name, default_value=None):
    """
    The function `get_int_env_var` retrieves an integer environment variable value, with an optional
    default value if the variable is not set or cannot be converted to an integer.

    :param env_var_name: The name of the environment variable you want to retrieve the value of
    :param default_value: The default value is the value that will be returned if the environment
    variable is not found or if it cannot be converted to an integer
    :return: an integer value.
    """
    try:
        return int(os.environ.get(env_var_name, default_value))
    except Exception:
        # Covers both "unset with default None" (int(None) raises TypeError)
        # and non-numeric values (ValueError).
        return default_value


def prompt_y_n(msg, default=None):
    """Prompt a yes/no question; return True/False, honoring an optional default on empty input."""
    if default not in [None, "y", "n"]:
        raise ValueError("Valid values for default are 'y', 'n' or None")
    y = "Y" if default == "y" else "y"
    n = "N" if default == "n" else "n"
    while True:
        ans = prompt_input("{} ({}/{}): ".format(msg, y, n))
        if ans.lower() == n.lower():
            return False
        if ans.lower() == y.lower():
            return True
        if default and not ans:
            return default == y.lower()


def prompt_input(msg):
    """Read one line from stdin with the standard "===>" prompt prefix."""
    return input("\n===> " + msg)
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/version_hint_utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import datetime
import json
import logging

from promptflow._constants import (LAST_HINT_TIME, LAST_CHECK_TIME, PF_VERSION_CHECK, CLI_PACKAGE_NAME,
                                   HINT_INTERVAL_DAY, GET_PYPI_INTERVAL_DAY, LATEST_VERSION, CURRENT_VERSION)
from promptflow._sdk._constants import HOME_PROMPT_FLOW_DIR

# Activities after which an upgrade hint may be emitted.
# NOTE(review): the list is only defined here; presumably consumed by telemetry
# decorators elsewhere — verify against callers.
HINT_ACTIVITY_NAME = ["pf.flows.test", "pf.runs.create_or_update", "pfazure.flows.create_or_update",
                      "pfazure.runs.create_or_update"]
logger = logging.getLogger(__name__)


def get_cached_versions():
    """Load the version-check cache file (JSON) from the user's promptflow home dir.

    The file is created (touched, user read/write) if missing; unparsable or
    empty content yields an empty dict.
    """
    from promptflow._sdk._utils import read_write_by_user

    (HOME_PROMPT_FLOW_DIR / PF_VERSION_CHECK).touch(mode=read_write_by_user(), exist_ok=True)
    with open(HOME_PROMPT_FLOW_DIR / PF_VERSION_CHECK, "r") as f:
        try:
            cached_versions = json.load(f)
        except json.decoder.JSONDecodeError:
            cached_versions = {}
    return cached_versions


def dump_cached_versions(cached_versions):
    """Overwrite the version-check cache file with ``cached_versions`` as JSON."""
    with open(HOME_PROMPT_FLOW_DIR / PF_VERSION_CHECK, "w") as f:
        json.dump(cached_versions, f)


def get_latest_version_from_pypi(package_name):
    """Query PyPI's JSON API for the latest version of ``package_name``.

    Best-effort: any failure (network, non-200, schema) is logged at debug
    level and None is returned.
    """
    pypi_url = f"https://pypi.org/pypi/{package_name}/json"
    try:
        import requests

        response = requests.get(pypi_url, timeout=3)
        if response.status_code == 200:
            data = response.json()
            latest_version = data["info"]["version"]
            return latest_version
        else:
            return None
    except Exception as ex:  # pylint: disable=broad-except
        logger.debug(f"Failed to get the latest version from '{pypi_url}'. {str(ex)}")
        return None


def check_latest_version():
    """ Get the latest versions from a cached file"""
    # Only hit PyPI when the last successful check is older than GET_PYPI_INTERVAL_DAY.
    cached_versions = get_cached_versions()
    last_check_time = datetime.datetime.strptime(cached_versions[LAST_CHECK_TIME], '%Y-%m-%d %H:%M:%S.%f') \
        if LAST_CHECK_TIME in cached_versions else None

    if last_check_time is None or (datetime.datetime.now() >
                                   last_check_time + datetime.timedelta(days=GET_PYPI_INTERVAL_DAY)):
        version = get_latest_version_from_pypi(CLI_PACKAGE_NAME)
        if version is not None:
            cached_versions[LATEST_VERSION] = version
            cached_versions[LAST_CHECK_TIME] = str(datetime.datetime.now())
            dump_cached_versions(cached_versions)


def hint_for_update():
    """
    Check if there is a new version of prompt flow available every 7 days. IF yes, log debug info to hint
    customer to upgrade package.
    """
    cached_versions = get_cached_versions()
    last_hint_time = datetime.datetime.strptime(
        cached_versions[LAST_HINT_TIME], '%Y-%m-%d %H:%M:%S.%f'
    ) if LAST_HINT_TIME in cached_versions else None
    if last_hint_time is None or (datetime.datetime.now() >
                                  last_hint_time + datetime.timedelta(days=HINT_INTERVAL_DAY)):
        from promptflow._sdk._utils import get_promptflow_sdk_version

        cached_versions[CURRENT_VERSION] = get_promptflow_sdk_version()
        if LATEST_VERSION in cached_versions:
            from packaging.version import parse

            if parse(cached_versions[CURRENT_VERSION]) < parse(cached_versions[LATEST_VERSION]):
                cached_versions[LAST_HINT_TIME] = str(datetime.datetime.now())
                message = (f"New prompt flow version available: promptflow-{cached_versions[LATEST_VERSION]}. Running "
                           f"'pf upgrade' to update CLI.")
                logger.debug(message)
        # Persist CURRENT_VERSION (and LAST_HINT_TIME when a hint was emitted)
        # once per hint-interval check.
        dump_cached_versions(cached_versions)
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/async_utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import asyncio
from concurrent.futures import ThreadPoolExecutor


def _has_running_loop() -> bool:
    """Return True when the calling thread already hosts a running event loop.

    asyncio.get_running_loop() raises RuntimeError when no loop is running in
    the current thread; catching that RuntimeError is the documented way to
    probe for one:
    https://docs.python.org/3/library/asyncio-eventloop.html?highlight=get_running_loop#asyncio.get_running_loop
    """
    try:
        asyncio.get_running_loop()
    except RuntimeError:
        return False
    return True


def async_run_allowing_running_loop(async_func, *args, **kwargs):
    """Run ``async_func(*args, **kwargs)`` to completion and return its result,
    even when called from a thread that already has a running event loop.

    A thread may host only one event loop, so calling asyncio.run directly from
    an async environment (e.g. a notebook) raises "asyncio.run() cannot be
    called from a running event loop". When a loop is already running we hand
    the coroutine to a one-off worker thread — which is free to start its own
    loop — and block on the result; otherwise we run it right here.
    """

    def _run_in_fresh_loop():
        return asyncio.run(async_func(*args, **kwargs))

    if not _has_running_loop():
        return _run_in_fresh_loop()
    with ThreadPoolExecutor(1) as executor:
        return executor.submit(_run_in_fresh_loop).result()
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/yaml_utils.py
from io import StringIO
from os import PathLike
from typing import IO, AnyStr, Dict, Optional, Union

from ruamel.yaml import YAML, YAMLError

from promptflow._constants import DEFAULT_ENCODING
from promptflow._utils._errors import YamlParseError


def load_yaml(source: Optional[Union[AnyStr, PathLike, IO]]) -> Dict:
    # null check - just return an empty dict.
    # Certain CLI commands rely on this behavior to produce a resource
    # via CLI, which is then populated through CLArgs.
    """Load a local YAML file or a readable stream object.

    .. note::

        1. For a local file yaml

        .. code-block:: python

            yaml_path = "path/to/yaml"
            content = load_yaml(yaml_path)

        2. For a readable stream object

        .. code-block:: python

            with open("path/to/yaml", "r", encoding="utf-8") as f:
                content = load_yaml(f)


    :param source: The relative or absolute path to the local file, or a readable stream object.
    :type source: str
    :return: A dictionary representation of the local file's contents.
    :rtype: Dict
    """
    if source is None:
        return {}

    # pylint: disable=redefined-builtin
    input = None
    must_open_file = False
    try:  # check source type by duck-typing it as an IOBase
        readable = source.readable()
        if not readable:  # source is misformatted stream or file
            msg = "File Permissions Error: The already-open \n\n inputted file is not readable."
            raise Exception(msg)
        # source is an already-open stream or file, we can read() from it directly.
        input = source
    except AttributeError:
        # source has no writable() function, assume it's a string or file path.
        must_open_file = True

    if must_open_file:  # If supplied a file path, open it.
        try:
            input = open(source, "r", encoding=DEFAULT_ENCODING)
        except OSError:  # FileNotFoundError introduced in Python 3
            msg = "No such file or directory: {}"
            raise Exception(msg.format(source))
    # input should now be a readable file or stream. Parse it.
    cfg = {}
    try:
        # Round-trip loader with quote preservation, so content can be dumped
        # back with minimal formatting loss.
        yaml = YAML()
        yaml.preserve_quotes = True
        cfg = yaml.load(input)
    except YAMLError as e:
        msg = f"Error while parsing yaml file: {source} \n\n {str(e)}"
        raise Exception(msg)
    finally:
        # Only close streams this function itself opened.
        if must_open_file:
            input.close()
    return cfg


def load_yaml_string(yaml_string: str):
    """Load a yaml string.

    .. code-block:: python

        yaml_string = "some yaml string"
        object = load_yaml_string(yaml_string)


    :param yaml_string: A yaml string.
    :type yaml_string: str
    """
    yaml = YAML()
    yaml.preserve_quotes = True
    return yaml.load(yaml_string)


def dump_yaml(*args, **kwargs):
    """Dump data to a yaml string or stream.

    .. note::

        1. Dump to a yaml string

        .. code-block:: python

            data = {"key": "value"}
            yaml_string = dump_yaml(data)

        2. Dump to a stream

        .. code-block:: python

            data = {"key": "value"}
            with open("path/to/yaml", "w", encoding="utf-8") as f:
                dump_yaml(data, f)
    """
    yaml = YAML()
    yaml.default_flow_style = False
    # when using with no stream parameter but just the data, dump to yaml string and return
    if len(args) == 1:
        string_stream = StringIO()
        yaml.dump(args[0], string_stream, **kwargs)
        output_string = string_stream.getvalue()
        string_stream.close()
        return output_string
    # when using with stream parameter, dump to stream. e.g.:
    # open('test.yaml', 'w', encoding='utf-8') as f:
    #     dump_yaml(data, f)
    elif len(args) == 2:
        return yaml.dump(*args, **kwargs)
    else:
        raise YamlParseError("Only 1 or 2 positional arguments are allowed for dump yaml util function.")
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/credential_scrubber.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import re


class CredentialScrubber:
    """Scrub sensitive information in string."""

    PLACE_HOLDER = "**data_scrubbed**"
    # Literal strings this short are too ambiguous to scrub safely.
    LENGTH_THRESHOLD = 2

    def __init__(self):
        # Built-in patterns: the value following "sig=" or "key=" up to the
        # next whitespace, ';' or '&' is treated as a secret.
        self.default_regex_set = {
            r"(?<=sig=)[^\s;&]+",  # Replace signature.
            r"(?<=key=)[^\s;&]+",  # Replace key.
        }
        self.default_str_set = set()
        self.custom_regex_set = set()
        self.custom_str_set = set()

    def scrub(self, input: str):
        """Return ``input`` with every known secret replaced by PLACE_HOLDER.

        For example, for input string: "print accountkey=accountKey", the output will be:
        "print accountkey=**data_scrubbed**"
        """
        result = input
        for pattern in self.default_regex_set | self.custom_regex_set:
            result = re.sub(pattern, self.PLACE_HOLDER, result, flags=re.IGNORECASE)
        for literal in self.default_str_set | self.custom_str_set:
            result = result.replace(literal, self.PLACE_HOLDER)
        return result

    def add_regex(self, pattern: str):
        """Add regex pattern to checklist."""
        # policy: http://policheck.azurewebsites.net/Pages/TermInfo.aspx?LCID=9&TermID=79458
        self.custom_regex_set.add(pattern)

    def add_str(self, s: str):
        """Add string to checklist.

        Only scrub string with length > LENGTH_THRESHOLD.
        """
        if s is None or len(s) <= self.LENGTH_THRESHOLD:
            return
        self.custom_str_set.add(s)

    def clear(self):
        """Clear custom regex and string set."""
        self.custom_regex_set = set()
        self.custom_str_set = set()
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/retry_utils.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import time
from functools import wraps
from typing import Tuple, Type, Union

from requests import Response

from promptflow._utils.logger_utils import LoggerFactory

logger = LoggerFactory.get_logger(__name__)


def retry(exception_to_check: Union[Type[Exception], Tuple[Type[Exception], ...]], tries=4, delay=3, backoff=2):
    """
    From https://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    Retry calling the decorated function using an exponential backoff.

    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry

    Retry actions are logged with this module's logger.

    :param exception_to_check: the exception to check. may be a tuple of exceptions to check
    :type exception_to_check: Exception or tuple
    :param tries: number of times to try (not retry) before giving up
    :type tries: int
    :param delay: initial delay between retries in seconds
    :type delay: int
    :param backoff: backoff multiplier e.g. value of 2 will double the delay each retry
    :type backoff: int
    """

    def deco_retry(f):
        @wraps(f)
        def f_retry(*args, **kwargs):
            retry_times, delay_seconds = tries, delay
            while retry_times > 1:
                try:
                    logger.debug("Running %s, %d more tries to go.", str(f), retry_times)
                    return f(*args, **kwargs)
                except exception_to_check:
                    time.sleep(delay_seconds)
                    retry_times -= 1
                    delay_seconds *= backoff
                    logger.warning("%s, Retrying in %d seconds...", str(exception_to_check), delay_seconds)
            # Final attempt runs outside the try: its exception propagates to the caller.
            return f(*args, **kwargs)

        return f_retry  # true decorator

    return deco_retry


# Status codes that should NOT be retried; everything else (408, 429, 5xx
# except 501/505, and any code >= 506) is considered retryable.
HTTP_SAFE_CODES = set(range(506)) - {408, 429, 500, 502, 503, 504}
HTTP_RETRY_CODES = set(range(999)) - HTTP_SAFE_CODES


def http_retry_wrapper(f, tries=4, delay=3, backoff=2):
    """
    Retry an HTTP call with exponential backoff based on its response status code.

    Non-Response return values are passed through untouched (with a debug log),
    and the final attempt's response is returned regardless of its status.

    :param f: function to be retried, should return a Response object.
    :type f: Callable
    :param tries: number of times to try (not retry) before giving up
    :type tries: int
    :param delay: initial delay between retries in seconds
    :type delay: int
    :param backoff: backoff multiplier e.g. value of 2 will double the delay each retry
    :type backoff: int
    """

    @wraps(f)
    def f_retry(*args, **kwargs):
        retry_times, delay_seconds = tries, delay
        while retry_times > 1:
            result = f(*args, **kwargs)
            if not isinstance(result, Response):
                logger.debug(f"Not a retryable function, expected return type {Response}, got {type(result)}.")
                return result
            if result.status_code not in HTTP_RETRY_CODES:
                return result
            logger.warning(
                f"Retryable error code {result.status_code} returned, retrying in {delay_seconds} seconds. "
                f"Function {f.__name__}, Reason: {result.reason}"
            )
            time.sleep(delay_seconds)
            retry_times -= 1
            delay_seconds *= backoff
        # Last attempt: return whatever comes back, retryable or not.
        return f(*args, **kwargs)

    return f_retry
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/exception_utils.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import json import os from datetime import datetime from enum import Enum from traceback import TracebackException, format_tb from types import TracebackType, FrameType from promptflow.exceptions import PromptflowException, SystemErrorException, UserErrorException, ValidationException ADDITIONAL_INFO_USER_EXECUTION_ERROR = "ToolExecutionErrorDetails" ADDITIONAL_INFO_USER_CODE_STACKTRACE = "UserCodeStackTrace" CAUSE_MESSAGE = "\nThe above exception was the direct cause of the following exception:\n\n" CONTEXT_MESSAGE = "\nDuring handling of the above exception, another exception occurred:\n\n" TRACEBACK_MESSAGE = "Traceback (most recent call last):\n" class RootErrorCode: USER_ERROR = "UserError" SYSTEM_ERROR = "SystemError" class ResponseCode(str, Enum): SUCCESS = "200" ACCEPTED = "202" REDIRECTION = "300" CLIENT_ERROR = "400" SERVICE_ERROR = "500" UNKNOWN = "0" class ErrorResponse: """A class that represents the response body when an error occurs. It follows the following specification: https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses """ def __init__(self, error_dict): self._error_dict = error_dict @staticmethod def from_error_dict(error_dict): """Create an ErrorResponse from an error dict. 
The error dict which usually is generated by ExceptionPresenter.create(exception).to_dict() """ return ErrorResponse(error_dict) @staticmethod def from_exception(ex: Exception, *, include_debug_info=False): presenter = ExceptionPresenter.create(ex) error_dict = presenter.to_dict(include_debug_info=include_debug_info) return ErrorResponse(error_dict) @property def message(self): return self._error_dict.get("message", "") @property def response_code(self): """Given the error code, return the corresponding http response code.""" root_error_code = self._error_dict.get("code") return ResponseCode.CLIENT_ERROR if root_error_code == RootErrorCode.USER_ERROR else ResponseCode.SERVICE_ERROR @property def additional_info(self): """Return the additional info of the error. The additional info is defined in the error response. It is stored as a list of dict, each of which contains a "type" and "info" field. We change the list of dict to a dict of dict for easier access. """ result = {} list_of_dict = self._error_dict.get("additionalInfo") if not list_of_dict or not isinstance(list_of_dict, list): return result for item in list_of_dict: # We just ignore the item if it is not a dict or does not contain the required fields. 
if not isinstance(item, dict): continue name = item.get("type") info = item.get("info") if not name or not info: continue result[name] = info return result def get_additional_info(self, name): """Get the additional info by name.""" return self.additional_info.get(name) def get_user_execution_error_info(self): """Get user tool execution error info from additional info.""" user_execution_error_info = self.get_additional_info(ADDITIONAL_INFO_USER_EXECUTION_ERROR) if not user_execution_error_info or not isinstance(user_execution_error_info, dict): return {} return user_execution_error_info def to_dict(self): from promptflow._core.operation_context import OperationContext return { "error": self._error_dict, "correlation": None, # TODO: to be implemented "environment": None, # TODO: to be implemented "location": None, # TODO: to be implemented "componentName": OperationContext.get_instance().get_user_agent(), "time": datetime.utcnow().isoformat(), } def to_simplified_dict(self): return { "error": { "code": self._error_dict.get("code"), "message": self._error_dict.get("message"), } } @property def error_codes(self): error = self._error_dict error_codes = [] while error is not None: code = error.get("code") if code is not None: error_codes.append(code) error = error.get("innerError") else: break return error_codes @property def error_code_hierarchy(self): """Get the code hierarchy from error dict.""" return "/".join(self.error_codes) @property def innermost_error_code(self): error_codes = self.error_codes if error_codes: return error_codes[-1] return None class ExceptionPresenter: """A class that can extract information from the exception instance. It is designed to work for both PromptflowException and other exceptions. 
""" def __init__(self, ex: Exception): self._ex = ex @staticmethod def create(ex: Exception): if isinstance(ex, PromptflowException): return PromptflowExceptionPresenter(ex) return ExceptionPresenter(ex) @property def formatted_traceback(self): te = TracebackException.from_exception(self._ex) return "".join(te.format()) @property def debug_info(self): return self.build_debug_info(self._ex) def build_debug_info(self, ex: Exception): inner_exception: dict = None stack_trace = TRACEBACK_MESSAGE + "".join(format_tb(ex.__traceback__)) if ex.__cause__ is not None: inner_exception = self.build_debug_info(ex.__cause__) stack_trace = CAUSE_MESSAGE + stack_trace elif ex.__context__ is not None and not ex.__suppress_context__: inner_exception = self.build_debug_info(ex.__context__) stack_trace = CONTEXT_MESSAGE + stack_trace return { "type": ex.__class__.__qualname__, "message": str(ex), "stackTrace": stack_trace, "innerException": inner_exception, } @property def error_codes(self): """The hierarchy of the error codes. We follow the "Microsoft REST API Guidelines" to define error codes in a hierarchy style. See the below link for details: https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses This method returns the error codes in a list. It will be converted into a nested json format by error_code_recursed. """ return [infer_error_code_from_class(SystemErrorException), self._ex.__class__.__name__] @property def error_code_recursed(self): """Returns a dict of the error codes for this exception. It is populated in a recursive manner, using the source from `error_codes` property. i.e. 
For PromptflowException, such as ToolExcutionError which inherits from UserErrorException, The result would be: { "code": "UserError", "innerError": { "code": "ToolExecutionError", "innerError": None, }, } For other exception types, such as ValueError, the result would be: { "code": "SystemError", "innerError": { "code": "ValueError", "innerError": None, }, } """ current_error = None reversed_error_codes = reversed(self.error_codes) if self.error_codes else [] for code in reversed_error_codes: current_error = { "code": code, "innerError": current_error, } return current_error def to_dict(self, *, include_debug_info=False): """Return a dict representation of the exception. This dict specification corresponds to the specification of the Microsoft API Guidelines: https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses Note that this dict represents the "error" field in the response body of the API. The whole error response is then populated in another place outside of this class. """ if isinstance(self._ex, JsonSerializedPromptflowException): return self._ex.to_dict(include_debug_info=include_debug_info) # Otherwise, return general dict representation of the exception. result = {"message": str(self._ex), "messageFormat": "", "messageParameters": {}} result.update(self.error_code_recursed) if include_debug_info: result["debugInfo"] = self.debug_info return result class PromptflowExceptionPresenter(ExceptionPresenter): @property def error_codes(self): """The hierarchy of the error codes. We follow the "Microsoft REST API Guidelines" to define error codes in a hierarchy style. See the below link for details: https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses For subclass of PromptflowException, use the ex.error_codes directly. For PromptflowException (not a subclass), the ex.error_code is None. 
The result should be: ["SystemError", {inner_exception type name if exist}] """ if self._ex.error_codes: return self._ex.error_codes # For PromptflowException (not a subclass), the ex.error_code is None. # Handle this case specifically. error_codes = [infer_error_code_from_class(SystemErrorException)] if self._ex.inner_exception: error_codes.append(infer_error_code_from_class(self._ex.inner_exception.__class__)) return error_codes def to_dict(self, *, include_debug_info=False): result = { "message": self._ex.message, "messageFormat": self._ex.message_format, "messageParameters": self._ex.serializable_message_parameters, "referenceCode": self._ex.reference_code, } result.update(self.error_code_recursed) if self._ex.additional_info: result["additionalInfo"] = [{"type": k, "info": v} for k, v in self._ex.additional_info.items()] if include_debug_info: result["debugInfo"] = self.debug_info return result class JsonSerializedPromptflowException(Exception): """Json serialized PromptflowException. This exception only has one argument message to avoid the argument missing error when load/dump with pickle in multiprocessing. Ref: https://bugs.python.org/issue32696 :param message: A Json serialized message describing the error. :type message: str """ def __init__(self, message): self.message = message super().__init__(self.message) def __str__(self): return self.message def to_dict(self, *, include_debug_info=False): # Return a dict representation of the inner exception. error_dict = json.loads(self.message) # The original serialized error might contain debugInfo. # We pop it out if include_debug_info is set to False. if not include_debug_info and "debugInfo" in error_dict: error_dict.pop("debugInfo") return error_dict def get_tb_next(tb: TracebackType, next_cnt: int): """Return the nth tb_next of input tb. If the tb does not have n tb_next, return the last tb which has a value. 
n = next_cnt """ while tb.tb_next and next_cnt > 0: tb = tb.tb_next next_cnt -= 1 return tb def last_frame_info(ex: Exception): """Return the line number where the error occurred.""" if ex: tb = TracebackException.from_exception(ex) last_frame = tb.stack[-1] if tb.stack else None if last_frame: return { "filename": last_frame.filename, "lineno": last_frame.lineno, "name": last_frame.name, } return {} def infer_error_code_from_class(cls): # Python has a built-in SystemError if cls == SystemErrorException: return RootErrorCode.SYSTEM_ERROR if cls == UserErrorException: return RootErrorCode.USER_ERROR if cls == ValidationException: return "ValidationError" return cls.__name__ def is_pf_core_frame(frame: FrameType): """Check if the frame is from promptflow core code.""" from promptflow import _core folder_of_core = os.path.dirname(_core.__file__) return folder_of_core in frame.f_code.co_filename def remove_suffix(text: str, suffix: str = None): """ Given a string, removes specified suffix, if it has. >>> remove_suffix('hello world', 'world') 'hello ' >>> remove_suffix('hello world', 'hello ') 'hello world' >>> remove_suffix('NoColumnFoundError', 'Error') 'NoColumnFound' :param text: string from which prefix will be removed. :param suffix: suffix to be removed. :return: string removed suffix. """ if not text or not suffix: return text if not text.endswith(suffix): return text return text[:-len(suffix)]
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/tool_utils.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import importlib import inspect import logging import re from enum import Enum, EnumMeta from typing import Any, Callable, Dict, List, Union, get_args, get_origin from jinja2 import Environment, meta from promptflow._core._errors import DuplicateToolMappingError from promptflow._utils.utils import is_json_serializable from promptflow.exceptions import ErrorTarget, UserErrorException from ..contracts.tool import ConnectionType, InputDefinition, Tool, ToolFuncCallScenario, ValueType from ..contracts.types import PromptTemplate module_logger = logging.getLogger(__name__) _DEPRECATED_TOOLS = "deprecated_tools" def value_to_str(val): if val is inspect.Parameter.empty: # For empty case, default field will be skipped when dumping to json return None if val is None: # Dump default: "" in json to avoid UI validation error return "" if isinstance(val, Enum): return val.value return str(val) def resolve_annotation(anno) -> Union[str, list]: """Resolve the union annotation to type list.""" origin = get_origin(anno) if origin != Union: return anno # Optional[Type] is Union[Type, NoneType], filter NoneType out args = [arg for arg in get_args(anno) if arg != type(None)] # noqa: E721 return args[0] if len(args) == 1 else args def param_to_definition(param, gen_custom_type_conn=False) -> (InputDefinition, bool): default_value = param.default # Get value type and enum from annotation value_type = resolve_annotation(param.annotation) enum = None custom_type_conn = None # Get value type and enum from default if no annotation if default_value is not inspect.Parameter.empty and value_type == inspect.Parameter.empty: value_type = default_value.__class__ if isinstance(default_value, Enum) else type(default_value) # Extract enum for enum class if isinstance(value_type, EnumMeta): enum = [str(option.value) for 
option in value_type] value_type = str is_connection = False if ConnectionType.is_connection_value(value_type): if ConnectionType.is_custom_strong_type(value_type): typ = ["CustomConnection"] custom_type_conn = [value_type.__name__] else: typ = [value_type.__name__] is_connection = True elif isinstance(value_type, list): if not all(ConnectionType.is_connection_value(t) for t in value_type): typ = [ValueType.OBJECT] else: custom_connection_added = False typ = [] custom_type_conn = [] for t in value_type: # Add 'CustomConnection' to typ list when custom strong type connection exists. Collect all custom types if ConnectionType.is_custom_strong_type(t): if not custom_connection_added: custom_connection_added = True typ.append("CustomConnection") custom_type_conn.append(t.__name__) else: if t.__name__ != "CustomConnection": typ.append(t.__name__) elif not custom_connection_added: custom_connection_added = True typ.append(t.__name__) is_connection = True else: typ = [ValueType.from_type(value_type)] # 1. Do not generate custom type when generating flow.tools.json for script tool. # Extension would show custom type if it exists. While for script tool with custom strong type connection, # we still want to show 'CustomConnection' type. # 2. Generate custom connection type when resolving tool in _tool_resolver, since we rely on it to convert the # custom connection to custom strong type connection. 
if not gen_custom_type_conn: custom_type_conn = None return ( InputDefinition( type=typ, default=value_to_str(default_value), description=None, enum=enum, custom_type=custom_type_conn, ), is_connection, ) def function_to_interface( f: Callable, initialize_inputs=None, gen_custom_type_conn=False, skip_prompt_template=False ) -> tuple: sign = inspect.signature(f) all_inputs = {} input_defs = {} connection_types = [] # Collect all inputs from class and func if initialize_inputs: if any(k for k in initialize_inputs if k in sign.parameters): raise Exception(f'Duplicate inputs found from {f.__name__!r} and "__init__()"!') all_inputs = {**initialize_inputs} enable_kwargs = any([param.kind == inspect.Parameter.VAR_KEYWORD for _, param in sign.parameters.items()]) all_inputs.update( { k: v for k, v in sign.parameters.items() if k != "self" and v.kind != v.VAR_KEYWORD and v.kind != v.VAR_POSITIONAL # TODO: Handle these cases } ) # Resolve inputs to definitions. for k, v in all_inputs.items(): # Get value type from annotation value_type = resolve_annotation(v.annotation) if skip_prompt_template and value_type is PromptTemplate: # custom llm tool has prompt template as input, skip it continue input_def, is_connection = param_to_definition(v, gen_custom_type_conn=gen_custom_type_conn) input_defs[k] = input_def if is_connection: connection_types.append(input_def.type) outputs = {} # Note: We don't have output definition now return input_defs, outputs, connection_types, enable_kwargs def function_to_tool_definition(f: Callable, type=None, initialize_inputs=None) -> Tool: """Translate a function to tool definition. :param f: Function to be translated. :param type: Tool type :param initialize_inputs: The initialize() func inputs get by get_initialize_inputs() when function defined in class. We will merge those inputs with f() inputs. :return: The tool definition. 
""" if hasattr(f, "__original_function"): f = f.__original_function inputs, outputs, _, _ = function_to_interface(f, initialize_inputs) # Hack to get class name class_name = None if "." in f.__qualname__: class_name = f.__qualname__.replace(f".{f.__name__}", "") meta_dict = { "name": f.__qualname__, "description": inspect.getdoc(f) or None, "inputs": inputs, "outputs": outputs, "class_name": class_name, "function": f.__name__, } return Tool(type=type, module=f.__module__, **meta_dict, is_builtin=True, stage="test") def get_inputs_for_prompt_template(template_str): """Get all input variable names and definitions from a jinja2 template string. : param template_str: template string : type t: str : return: the input name to InputDefinition dict : rtype t: Dict[str, ~promptflow.contracts.tool.InputDefinition] Example: >>> get_inputs_for_prompt_template( template_str="A simple prompt with no variables" ) {} >>> get_inputs_for_prompt_template( template_str="Prompt with only one string input {{str_input}}" ) {"str_input": InputDefinition(type=[ValueType.STRING])} >>> get_inputs_for_prompt_template( template_str="Prompt with image input ![image]({{image_input}}) and string input {{str_input}}" ) {"image_input": InputDefinition(type=[ValueType.IMAGE]), "str_input": InputDefinition(type=[ValueType.STRING]) """ env = Environment() template = env.parse(template_str) inputs = sorted(meta.find_undeclared_variables(template), key=lambda x: template_str.find(x)) result_dict = {i: InputDefinition(type=[ValueType.STRING]) for i in inputs} # currently we only support image type pattern = r"\!\[(\s*image\s*)\]\(\{\{\s*([^{}]+)\s*\}\}\)" matches = re.finditer(pattern, template_str) for match in matches: input_name = match.group(2).strip() result_dict[input_name] = InputDefinition([ValueType(match.group(1).strip())]) return result_dict def get_prompt_param_name_from_func(f): """Get the param name of prompt template on provider.""" return next((k for k, annotation in 
f.__annotations__.items() if annotation == PromptTemplate), None) def validate_dynamic_list_func_response_type(response: Any, f: str): """Verify response type is correct. The response is a list of items. Each item is a dict with the following keys: - value: for backend use. Required. - display_value: for UI display. Optional. - hyperlink: external link. Optional. - description: information icon tip. Optional. The response can not be empty. """ if not response: raise ListFunctionResponseError(f"{f} response can not be empty.") if not isinstance(response, List): raise ListFunctionResponseError(f"{f} response must be a list.") for item in response: if not isinstance(item, Dict): raise ListFunctionResponseError(f"{f} response must be a list of dict. {item} is not a dict.") if "value" not in item: raise ListFunctionResponseError(f"{f} response dict must have 'value' key.") for key, value in item.items(): if not isinstance(key, str): raise ListFunctionResponseError(f"{f} response dict key must be a string. {key} is not a string.") if not is_json_serializable(value): raise ListFunctionResponseError(f"{f} response dict value {value} is not json serializable.") if not isinstance(value, (str, int, float, list, Dict)): raise ListFunctionResponseError( f"{f} response dict value must be a string, int, float, list or dict. {value} is not supported." ) def validate_tool_func_result(func_call_scenario: str, result): if func_call_scenario == ToolFuncCallScenario.REVERSE_GENERATED_BY: if not isinstance(result, Dict): raise RetrieveToolFuncResultValidationError( f"ToolFuncCallScenario {func_call_scenario} response must be a dict. " f"{result} is not a dict." 
) elif func_call_scenario == ToolFuncCallScenario.DYNAMIC_LIST: validate_dynamic_list_func_response_type(result, f"ToolFuncCallScenario {func_call_scenario}") def append_workspace_triple_to_func_input_params( func_sig_params: Dict, func_input_params_dict: Dict, ws_triple_dict: Dict[str, str] ): """Append workspace triple to func input params. :param func_sig_params: function signature parameters, full params. :param func_input_params_dict: user input param key-values for dynamic list function. :param ws_triple_dict: workspace triple dict, including subscription_id, resource_group_name, workspace_name. :return: combined func input params. """ # append workspace triple to func input params if any below condition are met: # 1. func signature has kwargs param. # 2. func signature has param named 'subscription_id','resource_group_name','workspace_name'. ws_triple_dict = ws_triple_dict if ws_triple_dict is not None else {} func_input_params_dict = func_input_params_dict if func_input_params_dict is not None else {} has_kwargs_param = any([param.kind == inspect.Parameter.VAR_KEYWORD for _, param in func_sig_params.items()]) if has_kwargs_param is False: # keep only params that are in func signature. Or run into error when calling func. avail_ws_info_dict = {k: v for k, v in ws_triple_dict.items() if k in set(func_sig_params.keys())} else: avail_ws_info_dict = ws_triple_dict # if ws triple key is in func input params, it means user has provided value for it, # do not expect implicit override. combined_func_input_params = dict(avail_ws_info_dict, **func_input_params_dict) return combined_func_input_params def load_function_from_function_path(func_path: str): """Load a function from a function path. The function path should be in the format of "module_name.function_name". 
""" try: module_name, func_name = func_path.rsplit(".", 1) module = importlib.import_module(module_name) f = getattr(module, func_name) if callable(f): return f else: raise FunctionPathValidationError(f"'{f}' is not callable.") except Exception as e: raise FunctionPathValidationError( f"Failed to parse function from function path: '{func_path}'. Expected format: format 'my_module.my_func'. " f"Detailed error: {e}" ) # Handling backward compatibility and generating a mapping between the previous and new tool IDs. def _find_deprecated_tools(package_tools) -> Dict[str, str]: _deprecated_tools = {} for tool_id, tool in package_tools.items(): # a list of old tool IDs that are mapped to the current tool ID. if tool and _DEPRECATED_TOOLS in tool: for old_tool_id in tool[_DEPRECATED_TOOLS]: # throw error to prompt user for manual resolution of this conflict, ensuring secure operation. if old_tool_id in _deprecated_tools: raise DuplicateToolMappingError( message_format=( "The tools '{first_tool_id}', '{second_tool_id}' are both linked to the deprecated " "tool ID '{deprecated_tool_id}'. To ensure secure operation, please either " "remove or adjust one of these tools in your environment and fix this conflict." 
), first_tool_id=_deprecated_tools[old_tool_id], second_tool_id=tool_id, deprecated_tool_id=old_tool_id, target=ErrorTarget.TOOL, ) _deprecated_tools[old_tool_id] = tool_id return _deprecated_tools def _get_function_path(function): # Validate function exist if isinstance(function, str): module_name, func_name = function.rsplit(".", 1) module = importlib.import_module(module_name) func = getattr(module, func_name) func_path = function elif isinstance(function, Callable): func = function func_path = f"{function.__module__}.{function.__name__}" else: raise UserErrorException("Function has invalid type, please provide callable or function name for function.") return func, func_path class RetrieveToolFuncResultError(UserErrorException): """Base exception raised for retreive tool func result errors.""" def __init__(self, message): msg = ( f"Unable to retreive tool func result due to '{message}'. \nPlease contact the tool author/support team " f"for troubleshooting assistance." ) super().__init__(msg, target=ErrorTarget.FUNCTION_PATH) class RetrieveToolFuncResultValidationError(RetrieveToolFuncResultError): pass class DynamicListError(UserErrorException): """Base exception raised for dynamic list errors.""" def __init__(self, message): msg = ( f"Unable to display list of items due to '{message}'. \nPlease contact the tool author/support team " f"for troubleshooting assistance." ) super().__init__(msg, target=ErrorTarget.FUNCTION_PATH) class ListFunctionResponseError(DynamicListError): pass class FunctionPathValidationError(DynamicListError): pass
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/openai_metrics_calculator.py
import tiktoken from importlib.metadata import version from promptflow.exceptions import UserErrorException IS_LEGACY_OPENAI = version("openai").startswith("0.") class OpenAIMetricsCalculator: def __init__(self, logger=None) -> None: self._logger = logger def get_openai_metrics_from_api_call(self, api_call: dict): total_metrics = {} if self._need_collect_metrics(api_call): try: metrics = self._get_openai_metrics_for_signal_api(api_call) self.merge_metrics_dict(total_metrics, metrics) except Exception as ex: self._log_warning(f"Failed to calculate metrics due to exception: {ex}.") children = api_call.get("children") if children is not None: for child in children: child_metrics = self.get_openai_metrics_from_api_call(child) self.merge_metrics_dict(total_metrics, child_metrics) api_call["system_metrics"] = total_metrics return total_metrics def _need_collect_metrics(self, api_call: dict): if api_call.get("type") != "LLM": return False output = api_call.get("output") if not isinstance(output, dict) and not isinstance(output, list): return False inputs = api_call.get("inputs") if not isinstance(inputs, dict): return False return True def _get_openai_metrics_for_signal_api(self, api_call: dict): output = api_call.get("output") if isinstance(output, dict): usage = output.get("usage") if isinstance(usage, dict): return usage self._log_warning( "Cannot find openai metrics in output, " "will calculate metrics from response data directly." 
) name = api_call.get("name") # Support both legacy api and OpenAI v1 api # Legacy api: # https://github.com/openai/openai-python/blob/v0.28.1/openai/api_resources/chat_completion.py # https://github.com/openai/openai-python/blob/v0.28.1/openai/api_resources/completion.py # OpenAI v1 api: # https://github.com/openai/openai-python/blob/main/src/openai/resources/chat/completions.py # https://github.com/openai/openai-python/blob/main/src/openai/resources/completions.py if ( name == "openai.api_resources.chat_completion.ChatCompletion.create" or name == "openai.resources.chat.completions.Completions.create" # openai v1 ): return self._get_openai_metrics_for_chat_api(api_call) elif ( name == "openai.api_resources.completion.Completion.create" or name == "openai.resources.completions.Completions.create" # openai v1 ): return self._get_openai_metrics_for_completion_api(api_call) else: raise CalculatingMetricsError(f"Calculating metrics for api {name} is not supported.") def _try_get_model(self, inputs, output): if IS_LEGACY_OPENAI: api_type = inputs.get("api_type") if not api_type: raise CalculatingMetricsError("Cannot calculate metrics for none or empty api_type.") if api_type == "azure": model = inputs.get("engine") else: model = inputs.get("model") else: if isinstance(output, dict): model = output.get("model") else: model = output[0].model if len(output) > 0 and hasattr(output[0], "model") else None if not model: model = inputs.get("model") if not model: raise CalculatingMetricsError( "Cannot get a valid model to calculate metrics. " "Please specify a engine for AzureOpenAI API or a model for OpenAI API." 
) return model def _get_openai_metrics_for_chat_api(self, api_call): inputs = api_call.get("inputs") output = api_call.get("output") metrics = {} enc, tokens_per_message, tokens_per_name = self._get_encoding_for_chat_api(self._try_get_model(inputs, output)) metrics["prompt_tokens"] = self._get_prompt_tokens_from_messages( inputs["messages"], enc, tokens_per_message, tokens_per_name ) if isinstance(output, list): if IS_LEGACY_OPENAI: metrics["completion_tokens"] = len(output) else: metrics["completion_tokens"] = len( [chunk for chunk in output if chunk.choices and chunk.choices[0].delta.content] ) else: metrics["completion_tokens"] = self._get_completion_tokens_for_chat_api(output, enc) metrics["total_tokens"] = metrics["prompt_tokens"] + metrics["completion_tokens"] return metrics def _get_encoding_for_chat_api(self, model): try: enc = tiktoken.encoding_for_model(model) except KeyError: enc = tiktoken.get_encoding("cl100k_base") if model == "gpt-35-turbo-0301": tokens_per_message = 4 tokens_per_name = -1 elif "gpt-35-turbo" in model or "gpt-3.5-turbo" in model or "gpt-4" in model: tokens_per_message = 3 tokens_per_name = 1 else: raise CalculatingMetricsError(f"Calculating metrics for model {model} is not supported.") return enc, tokens_per_message, tokens_per_name def _get_prompt_tokens_from_messages(self, messages, enc, tokens_per_message, tokens_per_name): prompt_tokens = 0 for message in messages: prompt_tokens += tokens_per_message for key, value in message.items(): prompt_tokens += len(enc.encode(value)) if key == "name": prompt_tokens += tokens_per_name prompt_tokens += 3 return prompt_tokens def _get_completion_tokens_for_chat_api(self, output, enc): completion_tokens = 0 choices = output.get("choices") if isinstance(choices, list): for ch in choices: if isinstance(ch, dict): message = ch.get("message") if isinstance(message, dict): content = message.get("content") if isinstance(content, str): completion_tokens += len(enc.encode(content)) return 
completion_tokens def _get_openai_metrics_for_completion_api(self, api_call: dict): metrics = {} inputs = api_call.get("inputs") output = api_call.get("output") enc = self._get_encoding_for_completion_api(self._try_get_model(inputs, output)) metrics["prompt_tokens"] = 0 prompt = inputs.get("prompt") if isinstance(prompt, str): metrics["prompt_tokens"] = len(enc.encode(prompt)) elif isinstance(prompt, list): for pro in prompt: metrics["prompt_tokens"] += len(enc.encode(pro)) if isinstance(output, list): if IS_LEGACY_OPENAI: metrics["completion_tokens"] = len(output) else: metrics["completion_tokens"] = len( [chunk for chunk in output if chunk.choices and chunk.choices[0].text] ) else: metrics["completion_tokens"] = self._get_completion_tokens_for_completion_api(output, enc) metrics["total_tokens"] = metrics["prompt_tokens"] + metrics["completion_tokens"] return metrics def _get_encoding_for_completion_api(self, model): try: return tiktoken.encoding_for_model(model) except KeyError: return tiktoken.get_encoding("p50k_base") def _get_completion_tokens_for_completion_api(self, output, enc): completion_tokens = 0 choices = output.get("choices") if isinstance(choices, list): for ch in choices: if isinstance(ch, dict): text = ch.get("text") if isinstance(text, str): completion_tokens += len(enc.encode(text)) return completion_tokens def merge_metrics_dict(self, metrics: dict, metrics_to_merge: dict): for k, v in metrics_to_merge.items(): metrics[k] = metrics.get(k, 0) + v def _log_warning(self, msg): if self._logger: self._logger.warning(msg) class CalculatingMetricsError(UserErrorException): """The exception that is raised when calculating metrics failed.""" pass
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/load_data.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- import logging import os from pathlib import Path from typing import Any, Dict, List, Tuple, Union from promptflow.exceptions import ErrorTarget, UserErrorException module_logger = logging.getLogger(__name__) def _pd_read_file(local_path: str, logger: logging.Logger = None, max_rows_count: int = None) -> "DataFrame": import pandas as pd local_path = str(local_path) # if file is empty, return empty DataFrame directly if ( os.path.getsize(local_path) == 0 ): # CodeQL [SM01305] Safe use per local_path is set by PRT service not by end user return pd.DataFrame() # load different file formats # set dtype to object to avoid auto type conversion # executor will apply type conversion based on flow definition, so no conversion should be acceptable # note that for csv and tsv format, this will make integer and float columns to be string; # for rest, integer will be int and float will be float dtype = object if local_path.endswith(".csv"): df = pd.read_csv(local_path, dtype=dtype, keep_default_na=False, nrows=max_rows_count) elif local_path.endswith(".json"): df = pd.read_json(local_path, dtype=dtype) elif local_path.endswith(".jsonl"): df = pd.read_json(local_path, dtype=dtype, lines=True, nrows=max_rows_count) elif local_path.endswith(".tsv"): df = pd.read_table(local_path, dtype=dtype, keep_default_na=False, nrows=max_rows_count) elif local_path.endswith(".parquet"): df = pd.read_parquet(local_path) # read_parquet has no parameter dtype else: # parse file as jsonl when extension is not known (including unavailable) # ignore and logging if failed to load file content. 
try: df = pd.read_json(local_path, dtype=dtype, lines=True, nrows=max_rows_count) except: # noqa: E722 if logger is None: logger = module_logger logger.warning( f"File {Path(local_path).name} is not supported format: " f"csv, tsv, json, jsonl, parquet. Ignoring it." ) return pd.DataFrame() return df def _bfs_dir(dir_path: List[str]) -> Tuple[List[str], List[str]]: """BFS traverse directory with depth 1, returns files and directories""" files, dirs = [], [] for path in dir_path: for filename in os.listdir(path): file = Path(path, filename).resolve() if file.is_file(): files.append(str(file)) else: dirs.append(str(file)) return files, dirs def _handle_dir(dir_path: str, max_rows_count: int, logger: logging.Logger = None) -> "DataFrame": """load data from directory""" import pandas as pd df = pd.DataFrame() # BFS traverse directory to collect files to load target_dir = [str(dir_path)] while len(target_dir) > 0: files, dirs = _bfs_dir(target_dir) for file in files: current_df = _pd_read_file(file, logger=logger, max_rows_count=max_rows_count) df = pd.concat([df, current_df]) length = len(df) if max_rows_count and length >= max_rows_count: df = df.head(max_rows_count) return df # no readable data in current level, dive into next level target_dir = dirs return df def load_data( local_path: Union[str, Path], *, logger: logging.Logger = None, max_rows_count: int = None ) -> List[Dict[str, Any]]: """load data from local file""" df = load_df(local_path, logger, max_rows_count=max_rows_count) # convert dataframe to list of dict result = [] for _, row in df.iterrows(): result.append(row.to_dict()) return result def load_df(local_path: Union[str, Path], logger: logging.Logger = None, max_rows_count: int = None) -> "DataFrame": """load data from local file to df. 
For the usage of PRS.""" lp = local_path if isinstance(local_path, Path) else Path(local_path) try: if lp.is_file(): df = _pd_read_file(local_path, logger=logger, max_rows_count=max_rows_count) # honor max_rows_count if it is specified if max_rows_count and len(df) > max_rows_count: df = df.head(max_rows_count) else: df = _handle_dir(local_path, max_rows_count=max_rows_count, logger=logger) except ValueError as e: raise InvalidUserData( message_format="Fail to load invalid data. We support file formats: csv, tsv, json, jsonl, parquet. " "Please check input data." ) from e return df class InvalidUserData(UserErrorException): def __init__(self, **kwargs): super().__init__(target=ErrorTarget.RUNTIME, **kwargs)
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/__init__.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # ---------------------------------------------------------
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/dataclass_serializer.py
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- from dataclasses import fields, is_dataclass from datetime import datetime from enum import Enum from typing import Any, Callable, Dict, List, Type, TypeVar from promptflow._core.generator_proxy import GeneratorProxy from promptflow.contracts.tool import ConnectionType T = TypeVar("T") def get_type(obj: type): if is_dataclass(obj): return obj if isinstance(obj, list): return List[get_type(obj[0])] if isinstance(obj, dict): return Dict[str, get_type(obj[list(obj.keys())[0]])] return obj def deserialize_dataclass(cls: Type[T], data: dict) -> T: if not is_dataclass(cls): raise ValueError(f"{cls} is not a dataclass") if not isinstance(data, dict): raise ValueError(f"{data} is not a dict") kwargs = {} for field in fields(cls): if field.name not in data: kwargs[field.name] = field.default continue field_type = get_type(field.type) kwargs[field.name] = deserialize_value(data[field.name], field_type) return cls(**kwargs) def deserialize_value(obj, field_type): if not isinstance(field_type, type): return obj if is_dataclass(field_type): return deserialize_dataclass(field_type, obj) if issubclass(field_type, Enum): return field_type(obj) if issubclass(field_type, datetime) and obj is not None: # Remove Z/z at the end of the string. 
if obj.endswith("Z") or obj.endswith("z"): return datetime.fromisoformat(obj[:-1]) return datetime.fromisoformat(obj) return obj def serialize(value: object, remove_null: bool = False, serialization_funcs: Dict[type, Callable] = None) -> dict: if serialization_funcs: for cls, f in serialization_funcs.items(): if isinstance(value, cls): return f(value) if isinstance(value, datetime): return value.isoformat() + "Z" if isinstance(value, Enum): return value.value if isinstance(value, list): return [serialize(v, remove_null, serialization_funcs) for v in value] if isinstance(value, GeneratorProxy): # TODO: The current implementation of the serialize function is not self-explanatory, as value.items is mutable # whereas the serialize function should deal with a fixed object. We should rename the function to # to_serializable to better reflect its purpose. return value.items # Note that custom connection check should before dict check if ConnectionType.is_connection_value(value): return ConnectionType.serialize_conn(value) if isinstance(value, dict): return {k: serialize(v, remove_null, serialization_funcs) for k, v in value.items()} if is_dataclass(value): if hasattr(value, "serialize"): result = value.serialize() else: result = { f.name: serialize(getattr(value, f.name), remove_null, serialization_funcs) for f in fields(value) } if not remove_null: return result null_keys = [k for k, v in result.items() if v is None] for k in null_keys: result.pop(k) return result try: from pydantic import BaseModel if isinstance(value, BaseModel): # Handle pydantic model, which is used in langchain return value.dict() except ImportError: # Ignore ImportError if pydantic is not installed pass return value def assertEqual(a: dict, b: dict, path: str = ""): if isinstance(a, dict): assert isinstance(b, dict), f"{path}: {type(a)} != {type(b)}" assert set(a.keys()) == set(b.keys()), f"{path}: {set(a.keys())} != {set(b.keys())}" for key in a.keys(): assertEqual(a[key], b[key], path + "." 
+ key) elif isinstance(a, list): assert isinstance(b, list), f"{path}: {type(a)} != {type(b)}" assert len(a) == len(b), f"{path}: {len(a)} != {len(b)}" for i in range(len(a)): assertEqual(a[i], b[i], path + f"[{i}]") else: assert a == b, f"{path}: {a} != {b}" def convert_eager_flow_output_to_dict(value: Any): """ Convert the output of eager flow to a dict. Since the output of eager flow may not be a dict, we need to convert it to a dict in batch mode. Examples: 1. If the output is a dict, return it directly: value = {"output": 1} -> {"output": 1} 2. If the output is a dataclass, convert it to a dict: value = SampleDataClass(output=1) -> {"output": 1} 3. If the output is not a dict or dataclass, convert it to a dict by adding a key "output": value = 1 -> {"output": 1} """ if isinstance(value, dict): return value elif is_dataclass(value): return {f.name: getattr(value, f.name) for f in fields(value)} else: return {"output": value}
0
promptflow_repo/promptflow/src/promptflow/promptflow
promptflow_repo/promptflow/src/promptflow/promptflow/_utils/multimedia_data_converter.py
import re from dataclasses import dataclass from enum import Enum from functools import partial from pathlib import Path from typing import Any, Callable from promptflow._utils.multimedia_utils import is_multimedia_dict class ResourceType(Enum): """ Enumeration of different types of multimedia resources. We support path, URL, and base64 data. """ PATH = "path" URL = "url" BASE64 = "base64" @dataclass class MultimediaInfo: """ Data class that holds information about a multimedia resource. """ mime_type: str # The MIME type of the multimedia resource. resource_type: ResourceType # The type of the resource as defined in ResourceType. content: str # The content of the multimedia resource (path, URL, or base64 string). class AbstractMultimediaFormatAdapter: """ Abstract base class for adapting multimedia formats. This class provides an interface for extracting multimedia information from various data formats or constructing data formats from multimedia information. Subclasses should implement methods for specific contract version. A MultimediaInfo object contains the mime_type, resource_type, and the actual content of the multimedia resource. The multimedia data is typically represented as a dictionary with keys and values conforming to a specific multimedia data contract. One multimedia data example from 20231201 version: {"data:image/jpg;path": "logo.jpg"} """ # Check if the original_data is a multimedia format according to the current contract version. def is_valid_format(self, original_data: Any): raise NotImplementedError() def extract_info(self, original_data: Any) -> MultimediaInfo: """ Get the MultimediaInfo from the original data. Will include mime_type, resource_type, and content. Below is an example for the 20231201 version: {"data:image/jpg;path": "logo.jpg"} -> "image/jpg", "path", "logo.jpg" """ raise NotImplementedError() def create_data(self, info: MultimediaInfo) -> Any: """ Create multimedia data from info. 
Below is an example for the 20231201 version: "image/jpg", "path", "logo.jpg" -> {"data:image/jpg;path": "logo.jpg"} """ raise NotImplementedError() class MultimediaFormatAdapter20231201(AbstractMultimediaFormatAdapter): """ 20231201 version is our first contract's version, supports text and images (path/url/base64). 20231201 is the version number assigned by the customer in the YAML file. Path format example: {"data:image/jpg;path": "logo.jpg"} Url format example: {"data:image/jpg;url": "https://example.com/logo.jpg"} Base64 format example: {"data:image/jpg;base64": "base64 string"} """ MIME_PATTERN = re.compile(r"^data:(.*);(path|base64|url)$") def is_valid_format(self, original_data: Any): return isinstance(original_data, dict) and is_multimedia_dict(original_data) def extract_info(self, original_data: Any) -> MultimediaInfo: if not self.is_valid_format(original_data): return None for key in original_data: match = re.match(self.MIME_PATTERN, key) if match: mime_type, resource_type = match.group(1), match.group(2) content = original_data[key] return MultimediaInfo(mime_type, ResourceType(resource_type), content) return None def create_data(self, info: MultimediaInfo): return {f"data:{info.mime_type};{info.resource_type.value}": info.content} class AbstractMultimediaInfoConverter: def convert(self, info: MultimediaInfo) -> MultimediaInfo: """ Change info's mime type/resource type/content based on the client's logic. For cases that do not need to be changed, just return the original info. :param info: The MultimediaInfo to be converted. :type info: MultimediaInfo :return: The converted MultimediaInfo. :rtype: MultimediaInfo """ raise NotImplementedError() class MultimediaConverter: def __init__(self, flow_file: Path): """ Initialize the MultimediaConverter. :param flow_file: The path to the YAML file. The YAML content will be used to determine the contract version. :type flow_file: Path """ # TODO: check yaml content to determine the current contract version. 
# Different contract version will have different multimedia format. # The version exists in the yaml file, so we need to load the yaml to get version and init converter. self.format_adapter = MultimediaFormatAdapter20231201() def convert_content_recursively(self, content: Any, client_converter: AbstractMultimediaInfoConverter): """ Recursively converts multimedia data format in content. :param content: The object that may contain multimedia data. :type content: Any :param client_converter: The converter to modify multimedia info based on the client's logic. :type client_converter: AbstractMultimediaInfoConverter :return: The content with changed multimedia format. :rtype: Any """ process_func = partial(self._convert_content, converter=client_converter) return self._process_content_recursively(content, process_func=process_func) def _convert_content(self, original_data: Any, converter: AbstractMultimediaInfoConverter): if not self.format_adapter.is_valid_format(original_data): return original_data info = self.format_adapter.extract_info(original_data) # When can't extract multimedia info from original_data, return original_data directly. if info is None: return original_data info = converter.convert(info) return self.format_adapter.create_data(info) def _process_content_recursively(self, content: Any, process_func: Callable): if isinstance(content, list): return [self._process_content_recursively(item, process_func) for item in content] elif isinstance(content, dict): if self.format_adapter.is_valid_format(content): return process_func(original_data=content) else: return {k: self._process_content_recursively(v, process_func) for k, v in content.items()} else: return content
0
promptflow_repo/promptflow/src
promptflow_repo/promptflow/src/promptflow-tools/README.dev.md
# Development Guide ## Prerequisites ```bash pip install -r requirements.txt pip install pytest pytest-mock ``` ## Run tests - Create connection config file by `cp connections.json.example connections.json`. - Fill in fields manually in `connections.json`. - `cd tests` and run `pytest -s -v` to run all tests. ## Run tests in CI Use this [workflow](https://github.com/microsoft/promptflow/actions/workflows/tools_secret_upload.yml) to upload secrets in key vault. The secrets you uploaded would be used in [tools tests](https://github.com/microsoft/promptflow/actions/workflows/tools_tests.yml). Note that you only need to upload the SECRETS. > [!NOTE] After triggering the workflow, kindly request approval from Promptflow Support before proceeding further. ## PR check-in criteria Here's a friendly heads-up! We've got some criteria for you to self-review your code changes. It's a great way to double-check your work and make sure everything is in order before you share it. Happy coding! ### Maintain code quality The code you submit in your pull request should adhere to the following guidelines: - **Maintain clean code**: The code should be clean, easy to understand, and well-structured to promote readability and maintainability. - **Comment on your code**: Use comments to explain the purpose of certain code segments, particularly complex or non-obvious ones. This assists other developers in understanding your work. - **Correct typos and grammatical errors**: Ensure that the code and file names are free from spelling mistakes and grammatical errors. This enhances the overall presentation and clarity of your code. - **Avoid hard-coded values**: It is best to avoid hard-coding values unless absolutely necessary. Instead, use variables, constants, or configuration files, which can be easily modified without changing the source code. - **Prevent code duplication**: Modify the original code to be more general instead of duplicating it. 
Code duplication can lead to longer, more complex code that is harder to maintain. - **Implement effective error handling**: Good error handling is critical for troubleshooting customer issues and analyzing key metrics. Follow the guidelines provided in the [Error Handling Guideline](https://msdata.visualstudio.com/Vienna/_git/PromptFlow?path=/docs/error_handling_guidance.md&_a=preview) and reference the [exception.py](https://github.com/microsoft/promptflow/blob/main/src/promptflow-tools/promptflow/tools/exception.py) file for examples. ### Ensure high test coverage Test coverage is crucial for maintaining code quality. Please adhere to the following guidelines: - **Comprehensive Testing**: Include unit tests and e2e tests for any new functionality introduced. - **Exception Testing**: Make sure to incorporate unit tests for all exceptions. These tests should verify error codes, error messages, and other important values. For reference, you can check out [TestHandleOpenAIError](https://github.com/microsoft/promptflow/blob/main/src/promptflow-tools/tests/test_handle_openai_error.py). - **VSCode Testing**: If you're adding a new built-in tool, make sure to test your tool within the VSCode environment prior to submitting your PR. For more guidance on this, refer to [Use your tool from VSCode Extension](https://github.com/microsoft/promptflow/blob/main/docs/how-to-guides/develop-a-tool/create-and-use-tool-package.md#use-your-tool-from-vscode-extension). ### Add documents Ensure to include documentation for your new built-in tool, following the guidelines below: - **Error-Free Content**: Rectify all typographical and grammatical errors in the documentation. This will ensure clarity and readability. - **Code Alignment**: The documentation should accurately reflect the current state of your code. Ensure that all described functionalities and behaviors match with your implemented code. 
- **Functional Links**: Verify that all embedded links within the documentation are functioning properly, leading to the correct resources or references.
0
promptflow_repo/promptflow/src
promptflow_repo/promptflow/src/promptflow-tools/MANIFEST.in
include promptflow/tools/yamls/*.yaml
0
promptflow_repo/promptflow/src
promptflow_repo/promptflow/src/promptflow-tools/requirements.txt
google-search-results==2.4.1 promptflow # promptflow-tools only supports openai 1.x openai>=1.0.0
0
promptflow_repo/promptflow/src
promptflow_repo/promptflow/src/promptflow-tools/connections.json.example
{ "azure_open_ai_connection": { "type": "AzureOpenAIConnection", "value": { "api_key": "aoai-api-key", "api_base": "aoai-api-endpoint", "api_type": "azure", "api_version": "2023-07-01-preview" }, "module": "promptflow.connections" }, "serp_connection": { "type": "SerpConnection", "value": { "api_key": "serpapi-api-key" }, "module": "promptflow.connections" }, "custom_connection": { "type": "CustomConnection", "value": { "key1": "hey", "key2": "val2" }, "module": "promptflow.connections", "secret_keys": [ "key1" ] }, "gpt2_connection": { "type": "CustomConnection", "value": { "endpoint_url": "custom-endpoint-url", "model_family": "GPT2", "endpoint_api_key": "custom-endpoint-api-key" }, "module": "promptflow.connections", "secret_keys": [ "endpoint_api_key" ] }, "open_source_llm_ws_service_connection": { "type": "CustomConnection", "value": { "service_credential": "service-credential" }, "module": "promptflow.connections", "secret_keys": [ "service_credential" ] }, "open_ai_connection": { "type": "OpenAIConnection", "value": { "api_key": "openai-api-key", "organization": "openai-api-org" }, "module": "promptflow.connections" }, "azure_content_safety_connection": { "type": "AzureContentSafetyConnection", "value": { "api_key": "azure-content-safety-api-key", "endpoint": "azure-content-safety-endpoint-url", "api_version": "2023-10-01", "api_type": "Content Safety", "name": "prompt-flow-acs-tool-test" }, "module": "promptflow.connections" } }
0
promptflow_repo/promptflow/src
promptflow_repo/promptflow/src/promptflow-tools/README.md
# Prompt flow tools [![Python package](https://img.shields.io/pypi/v/promptflow-tools)](https://pypi.org/project/promptflow-tools/) [![License: MIT](https://img.shields.io/github/license/microsoft/promptflow)](https://github.com/microsoft/promptflow/blob/main/LICENSE) ## Introduction Tools are the fundamental building blocks of a flow in Azure Machine Learning prompt flow. Each tool is a simple, executable unit with a specific function, allowing users to perform various tasks. By combining different tools, users can create a flow that accomplishes a wide range of goals. One of the key benefit of prompt flow tools is their seamless integration with third-party APIs and python open source packages. This not only improves the functionality of large language models but also makes the development process more efficient. In this package, we provide a set of builtin tools of prompt flow, which are the most commonly used tools in the development of AI applications. We also provide a flexible way for users to create their own tools and share them with others. See [Create and Use Tool Package](https://github.com/microsoft/promptflow/blob/main/docs/how-to-guides/develop-a-tool/create-and-use-tool-package.md) for more details.
0
promptflow_repo/promptflow/src
promptflow_repo/promptflow/src/promptflow-tools/setup.py
import os import re from io import open from typing import Any, List, Match, cast from setuptools import find_namespace_packages, setup PACKAGE_NAME = "promptflow-tools" PACKAGE_FOLDER_PATH = "promptflow" def parse_requirements(file_name: str) -> List[str]: with open(file_name) as f: return [ require.strip() for require in f if require.strip() and not require.startswith('#') ] # Version extraction inspired from 'requests' with open(os.path.join(PACKAGE_FOLDER_PATH, "version.txt"), "r") as fd: version_content = fd.read() print(version_content) version = cast(Match[Any], re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', version_content, re.MULTILINE)).group(1) if not version: raise RuntimeError("Cannot find version information") with open("README.md", encoding="utf-8") as f: readme = f.read() with open("CHANGELOG.md", encoding="utf-8") as f: changelog = f.read() setup( name=PACKAGE_NAME, version=version, description="Prompt flow built-in tools", long_description_content_type="text/markdown", long_description=readme + "\n\n" + changelog, author="Microsoft Corporation", author_email="[email protected]", url="https://github.com/microsoft/promptflow", classifiers=[ "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], python_requires="<4.0,>=3.8", install_requires=parse_requirements('requirements.txt'), packages=find_namespace_packages(include=[f"{PACKAGE_FOLDER_PATH}.*"]), entry_points={ "package_tools": ["builtins = promptflow.tools.list:list_package_tools"], }, include_package_data=True, project_urls={ "Bug Reports": "https://github.com/microsoft/promptflow/issues", "Source": "https://github.com/microsoft/promptflow", }, )
0
promptflow_repo/promptflow/src
promptflow_repo/promptflow/src/promptflow-tools/NOTICE.txt
NOTICES AND INFORMATION Do Not Translate or Localize This software incorporates material from third parties. Microsoft makes certain open source code available at https://3rdpartysource.microsoft.com, or you may send a check or money order for US $5.00, including the product name, the open source component name, platform, and version number, to: Source Code Compliance Team Microsoft Corporation One Microsoft Way Redmond, WA 98052 USA Notwithstanding any other terms, you may reverse engineer this software to the extent required to debug changes to any libraries licensed under the GNU Lesser General Public License. --------------------------------------------------------- google-search-results 2.4.1 - MIT MIT License Copyright (c) 2018-2021 SerpApi Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ---------------------------------------------------------
0
promptflow_repo/promptflow/src
promptflow_repo/promptflow/src/promptflow-tools/CHANGELOG.md
# Release History ## 1.0.0 (2023.11.30) ### Features Added - Support openai 1.x in promptflow-tools - Add new tool "OpenAI GPT-4V"
0
promptflow_repo/promptflow/src/promptflow-tools
promptflow_repo/promptflow/src/promptflow-tools/tests/test_aoai.py
from unittest.mock import patch import pytest import json from promptflow.connections import AzureOpenAIConnection from promptflow.tools.aoai import chat, completion from promptflow.tools.exception import WrappedOpenAIError from tests.utils import AttrDict @pytest.mark.usefixtures("use_secrets_config_file") class TestAOAI: def test_aoai_completion(self, aoai_provider): prompt_template = "please complete this sentence: world war II " # test whether tool can handle param "stop" with value empty list # as openai raises "[] is not valid under any of the given schemas - 'stop'" aoai_provider.completion( prompt=prompt_template, deployment_name="gpt-35-turbo-instruct", stop=[], logit_bias={} ) def test_aoai_stream_completion(self, aoai_provider): prompt_template = "please complete this sentence: world war II " # test whether tool can handle param "stop" with value empty list in stream mode # as openai raises "[] is not valid under any of the given schemas - 'stop'" aoai_provider.completion( prompt=prompt_template, deployment_name="gpt-35-turbo-instruct", stop=[], logit_bias={}, stream=True ) def test_aoai_chat(self, aoai_provider, example_prompt_template, chat_history): result = aoai_provider.chat( prompt=example_prompt_template, deployment_name="gpt-35-turbo", max_tokens="32", temperature=0, user_input="Fill in more details about trend 2.", chat_history=chat_history, ) assert "additional details" in result.lower() def test_aoai_chat_api(self, azure_open_ai_connection, example_prompt_template, chat_history): result = chat( connection=azure_open_ai_connection, prompt=example_prompt_template, deployment_name="gpt-35-turbo", max_tokens="inF", temperature=0, user_input="Write a slogan for product X", chat_history=chat_history, ) assert "Product X".lower() in result.lower() @pytest.mark.parametrize( "function_call", [ "auto", {"name": "get_current_weather"}, ], ) def test_aoai_chat_with_function( self, azure_open_ai_connection, example_prompt_template, chat_history, functions, 
function_call): result = chat( connection=azure_open_ai_connection, prompt=example_prompt_template, deployment_name="gpt-35-turbo", max_tokens="inF", temperature=0, user_input="What is the weather in Boston?", chat_history=chat_history, functions=functions, function_call=function_call ) assert "function_call" in result assert result["function_call"]["name"] == "get_current_weather" def test_aoai_chat_with_name_in_roles( self, azure_open_ai_connection, example_prompt_template_with_name_in_roles, chat_history, functions): result = chat( connection=azure_open_ai_connection, prompt=example_prompt_template_with_name_in_roles, deployment_name="gpt-35-turbo", max_tokens="inF", temperature=0, functions=functions, name="get_location", result=json.dumps({"location": "Austin"}), question="What is the weather in Boston?", prev_question="Where is Boston?" ) assert "function_call" in result assert result["function_call"]["name"] == "get_current_weather" def test_aoai_chat_message_with_no_content(self, aoai_provider): # missing colon after role name. Sometimes following prompt may result in empty content. prompt = ( "user:\n what is your name\nassistant\nAs an AI language model developed by" " OpenAI, I do not have a name. You can call me OpenAI or AI assistant. " "How can I assist you today?" ) # assert chat tool can handle. 
aoai_provider.chat(prompt=prompt, deployment_name="gpt-35-turbo") # empty content after role name:\n prompt = "user:\n" aoai_provider.chat(prompt=prompt, deployment_name="gpt-35-turbo") def test_aoai_stream_chat(self, aoai_provider, example_prompt_template, chat_history): result = aoai_provider.chat( prompt=example_prompt_template, deployment_name="gpt-35-turbo", max_tokens="32", temperature=0, user_input="Fill in more details about trend 2.", chat_history=chat_history, stream=True, ) answer = "" while True: try: answer += next(result) except Exception: break assert "additional details" in answer.lower() @pytest.mark.parametrize( "params, expected", [ ({"stop": [], "logit_bias": {}}, {"stop": None}), ({"stop": ["</i>"], "logit_bias": {"16": 100, "17": 100}}, {}), ], ) def test_aoai_parameters(self, params, expected): for k, v in params.items(): if k not in expected: expected[k] = v deployment_name = "dummy" conn_dict = {"api_key": "dummy", "api_base": "base", "api_version": "dummy_ver", "api_type": "azure"} conn = AzureOpenAIConnection(**conn_dict) def mock_completion(self, **kwargs): assert kwargs["model"] == deployment_name for k, v in expected.items(): assert kwargs[k] == v, f"Expect {k} to be {v}, but got {kwargs[k]}" text = kwargs["prompt"] return AttrDict({"choices": [AttrDict({"text": text})]}) with patch("openai.resources.Completions.create", new=mock_completion): prompt = "dummy_prompt" result = completion(connection=conn, prompt=prompt, deployment_name=deployment_name, **params) assert result == prompt def test_aoai_chat_with_response_format( self, azure_open_ai_connection, example_prompt_template, chat_history): result = chat( connection=azure_open_ai_connection, prompt=example_prompt_template, deployment_name="gpt-35-turbo-1106", temperature=0, user_input="Write a slogan for product X, please response with json.", chat_history=chat_history, response_format={"type": "json_object"} ) assert "x:".lower() in result.lower() @pytest.mark.parametrize( 
"response_format, user_input, error_message, error_codes, exception", [ ({"type": "json"}, "Write a slogan for product X, please response with json.", "\'json\' is not one of [\'json_object\', \'text\']", "UserError/OpenAIError/BadRequestError", WrappedOpenAIError), ({"type": "json_object"}, "Write a slogan for product X", "\'messages\' must contain the word \'json\' in some form", "UserError/OpenAIError/BadRequestError", WrappedOpenAIError), ({"types": "json_object"}, "Write a slogan for product X", "The response_format parameter needs to be a dictionary such as {\"type\": \"text\"}", "UserError/OpenAIError/BadRequestError", WrappedOpenAIError) ] ) def test_aoai_chat_with_invalid_response_format( self, azure_open_ai_connection, example_prompt_template, chat_history, response_format, user_input, error_message, error_codes, exception ): with pytest.raises(exception) as exc_info: chat( connection=azure_open_ai_connection, prompt=example_prompt_template, deployment_name="gpt-35-turbo-1106", temperature=0, user_input=user_input, chat_history=chat_history, response_format=response_format ) assert error_message in exc_info.value.message assert exc_info.value.error_codes == error_codes.split("/") def test_aoai_chat_with_not_support_response_format_json_mode_model( self, azure_open_ai_connection, example_prompt_template, chat_history ): with pytest.raises(WrappedOpenAIError) as exc_info: chat( connection=azure_open_ai_connection, prompt=example_prompt_template, deployment_name="gpt-35-turbo", temperature=0, user_input="Write a slogan for product X, please response with json.", chat_history=chat_history, response_format={"type": "json_object"} ) error_message = "The response_format parameter needs to be a dictionary such as {\"type\": \"text\"}." 
assert error_message in exc_info.value.message assert exc_info.value.error_codes == "UserError/OpenAIError/BadRequestError".split("/") def test_aoai_chat_with_response_format_text_mode( self, azure_open_ai_connection, example_prompt_template, chat_history ): result = chat( connection=azure_open_ai_connection, prompt=example_prompt_template, deployment_name="gpt-35-turbo", temperature=0, user_input="Write a slogan for product X.", chat_history=chat_history, response_format={"type": "text"} ) assert "Product X".lower() in result.lower()
0
promptflow_repo/promptflow/src/promptflow-tools
promptflow_repo/promptflow/src/promptflow-tools/tests/test_aoai_gptv.py
import pytest from promptflow.tools.aoai_gpt4v import AzureOpenAI @pytest.fixture def azure_openai_provider(azure_open_ai_connection) -> AzureOpenAI: return AzureOpenAI(azure_open_ai_connection) @pytest.mark.usefixtures("use_secrets_config_file") @pytest.mark.skip("Skipping until we have a Azure OpenAI GPT-4 Vision deployment") class TestAzureOpenAIGPT4V: def test_openai_gpt4v_chat(self, azure_openai_provider, example_prompt_template_with_image, example_image): result = azure_openai_provider.chat( prompt=example_prompt_template_with_image, deployment_name="gpt-4v", max_tokens=480, temperature=0, question="which number did you see in this picture?", image_input=example_image, ) assert "10" == result def test_openai_gpt4v_stream_chat(self, azure_openai_provider, example_prompt_template_with_image, example_image): result = azure_openai_provider.chat( prompt=example_prompt_template_with_image, deployment_name="gpt-4v", max_tokens=480, temperature=0, question="which number did you see in this picture?", image_input=example_image, ) answer = "" while True: try: answer += next(result) except Exception: break assert "10" == result
0
promptflow_repo/promptflow/src/promptflow-tools
promptflow_repo/promptflow/src/promptflow-tools/tests/test_serpapi.py
import pytest from promptflow.exceptions import UserErrorException from promptflow.tools.serpapi import Engine, SafeMode, search import tests.utils as utils @pytest.mark.usefixtures("use_secrets_config_file") @pytest.mark.skip_if_no_api_key("serp_connection") class TestSerpAPI: def test_engine(self, serp_connection): query = "cute cats" num = 2 result_dict = search( connection=serp_connection, query=query, num=num, safe=SafeMode.ACTIVE, engine=Engine.GOOGLE.value) utils.is_json_serializable(result_dict, "serp api search()") assert result_dict["search_metadata"]["google_url"] is not None assert int(result_dict["search_parameters"]["num"]) == num assert result_dict["search_parameters"]["safe"].lower() == "active" result_dict = search( connection=serp_connection, query=query, num=num, safe=SafeMode.ACTIVE, engine=Engine.BING.value) utils.is_json_serializable(result_dict, "serp api search()") assert int(result_dict["search_parameters"]["count"]) == num assert result_dict["search_parameters"]["safe_search"].lower() == "strict" def test_invalid_api_key(self, serp_connection): serp_connection.api_key = "hello" query = "cute cats" num = 2 engine = Engine.GOOGLE.value error_msg = "Invalid API key. Your API key should be here: https://serpapi.com/manage-api-key" with pytest.raises(UserErrorException) as exc_info: search(connection=serp_connection, query=query, num=num, engine=engine) assert error_msg == exc_info.value.args[0] @pytest.mark.parametrize("engine", [Engine.GOOGLE.value, Engine.BING.value]) def test_invalid_query(self, serp_connection, engine): query = "" num = 2 error_msg = "Missing query `q` parameter." with pytest.raises(UserErrorException) as exc_info: search(connection=serp_connection, query=query, num=num, engine=engine) assert error_msg == exc_info.value.args[0]
0
promptflow_repo/promptflow/src/promptflow-tools
promptflow_repo/promptflow/src/promptflow-tools/tests/test_openai_gpt4v.py
import pytest from promptflow.tools.openai_gpt4v import OpenAI @pytest.fixture def openai_provider(open_ai_connection) -> OpenAI: return OpenAI(open_ai_connection) @pytest.mark.usefixtures("use_secrets_config_file") @pytest.mark.skip_if_no_api_key("open_ai_connection") class TestOpenAIGPT4V: def test_openai_gpt4v_chat(self, openai_provider, example_prompt_template_with_image, example_image): result = openai_provider.chat( prompt=example_prompt_template_with_image, model="gpt-4-vision-preview", max_tokens=480, temperature=0, question="which number did you see in this picture?", image_input=example_image, ) assert "10" == result def test_openai_gpt4v_stream_chat(self, openai_provider, example_prompt_template_with_image, example_image): result = openai_provider.chat( prompt=example_prompt_template_with_image, model="gpt-4-vision-preview", max_tokens=480, temperature=0, question="which number did you see in this picture?", image_input=example_image, ) answer = "" while True: try: answer += next(result) except Exception: break assert "10" == result
0
promptflow_repo/promptflow/src/promptflow-tools
promptflow_repo/promptflow/src/promptflow-tools/tests/test_embedding.py
import pytest from promptflow.tools.embedding import embedding from promptflow.tools.exception import InvalidConnectionType @pytest.mark.usefixtures("use_secrets_config_file") class TestEmbedding: def test_embedding_conn_aoai(self, azure_open_ai_connection): result = embedding( connection=azure_open_ai_connection, input="The food was delicious and the waiter", deployment_name="text-embedding-ada-002") assert len(result) == 1536 @pytest.mark.skip_if_no_api_key("open_ai_connection") def test_embedding_conn_oai(self, open_ai_connection): result = embedding( connection=open_ai_connection, input="The food was delicious and the waiter", model="text-embedding-ada-002") assert len(result) == 1536 def test_embedding_invalid_connection_type(self, serp_connection): error_codes = "UserError/ToolValidationError/InvalidConnectionType" with pytest.raises(InvalidConnectionType) as exc_info: embedding(connection=serp_connection, input="hello", deployment_name="text-embedding-ada-002") assert exc_info.value.error_codes == error_codes.split("/")
0
promptflow_repo/promptflow/src/promptflow-tools
promptflow_repo/promptflow/src/promptflow-tools/tests/utils.py
import json class AttrDict(dict): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def __getattr__(self, item): if item in self: return self.__getitem__(item) return super().__getattribute__(item) def is_json_serializable(data, function_name): try: json.dumps(data) except TypeError: raise TypeError(f"{function_name} output is not JSON serializable!") def verify_url_exists(endpoint_url: str) -> bool: import urllib.request from urllib.request import HTTPError from urllib.error import URLError try: urllib.request.urlopen( urllib.request.Request(endpoint_url), timeout=50) except HTTPError as e: # verify that the connection is not authorized, anything else would mean the endpoint is failed return e.code == 403 except URLError: # Endpoint does not exist - skip the test return False raise Exception("Task Succeeded unexpectedly.")
0
promptflow_repo/promptflow/src/promptflow-tools
promptflow_repo/promptflow/src/promptflow-tools/tests/conftest.py
import json import os import pytest import sys from pathlib import Path from pytest_mock import MockerFixture # noqa: E402 from tests.utils import verify_url_exists # Avoid circular dependencies: Use import 'from promptflow._internal' instead of 'from promptflow' # since the code here is in promptflow namespace as well from promptflow._internal import ConnectionManager from promptflow.connections import CustomConnection, OpenAIConnection, SerpConnection from promptflow.contracts.multimedia import Image from promptflow.tools.aoai import AzureOpenAI PROMOTFLOW_ROOT = Path(__file__).absolute().parents[1] CONNECTION_FILE = (PROMOTFLOW_ROOT / "connections.json").resolve().absolute().as_posix() root_str = str(PROMOTFLOW_ROOT.resolve().absolute()) if root_str not in sys.path: sys.path.insert(0, root_str) # connection @pytest.fixture(autouse=True) def use_secrets_config_file(mocker: MockerFixture): mocker.patch.dict(os.environ, {"PROMPTFLOW_CONNECTIONS": CONNECTION_FILE}) @pytest.fixture def azure_open_ai_connection(): return ConnectionManager().get("azure_open_ai_connection") @pytest.fixture def aoai_provider(azure_open_ai_connection) -> AzureOpenAI: aoai_provider = AzureOpenAI(azure_open_ai_connection) return aoai_provider @pytest.fixture def open_ai_connection(): return ConnectionManager().get("open_ai_connection") @pytest.fixture def serp_connection(): return ConnectionManager().get("serp_connection") def verify_om_llm_custom_connection(connection: CustomConnection) -> bool: '''Verify that there is a MIR endpoint up and available for the Custom Connection. We explicitly do not pass the endpoint key to avoid the delay in generating a response. 
''' return verify_url_exists(connection.configs['endpoint_url']) @pytest.fixture def gpt2_custom_connection(): return ConnectionManager().get("gpt2_connection") @pytest.fixture def open_model_llm_ws_service_connection() -> bool: try: creds_custom_connection: CustomConnection = ConnectionManager().get("open_source_llm_ws_service_connection") subs = json.loads(creds_custom_connection.secrets['service_credential']) for key, value in subs.items(): os.environ[key] = value return True except Exception as e: print(f"""Something failed setting environment variables for service credentials. Error: {e}""") return False @pytest.fixture(autouse=True) def skip_if_no_api_key(request, mocker): mocker.patch.dict(os.environ, {"PROMPTFLOW_CONNECTIONS": CONNECTION_FILE}) if request.node.get_closest_marker('skip_if_no_api_key'): conn_name = request.node.get_closest_marker('skip_if_no_api_key').args[0] connection = request.getfixturevalue(conn_name) # if dummy placeholder key, skip. if isinstance(connection, OpenAIConnection) or isinstance(connection, SerpConnection): if "-api-key" in connection.api_key: pytest.skip('skipped because no key') elif isinstance(connection, CustomConnection): if "endpoint_api_key" not in connection.secrets or "-api-key" in connection.secrets["endpoint_api_key"]: pytest.skip('skipped because no key') # Verify Custom Connections, but only those used by the Open_Model_LLM Tool if "endpoint_url" in connection.configs and "-endpoint-url" not in connection.configs["endpoint_url"]: if not verify_om_llm_custom_connection(connection): pytest.skip('skipped because the connection is not valid') # example prompts @pytest.fixture def example_prompt_template() -> str: with open(PROMOTFLOW_ROOT / "tests/test_configs/prompt_templates/marketing_writer/prompt.jinja2") as f: prompt_template = f.read() return prompt_template @pytest.fixture def example_prompt_template_with_name_in_roles() -> str: with open(PROMOTFLOW_ROOT / 
"tests/test_configs/prompt_templates/prompt_with_name_in_roles.jinja2") as f: prompt_template = f.read() return prompt_template @pytest.fixture def chat_history() -> list: with open(PROMOTFLOW_ROOT / "tests/test_configs/prompt_templates/marketing_writer/history.json") as f: history = json.load(f) return history @pytest.fixture def example_prompt_template_with_function() -> str: with open(PROMOTFLOW_ROOT / "tests/test_configs/prompt_templates/prompt_with_function.jinja2") as f: prompt_template = f.read() return prompt_template @pytest.fixture def example_prompt_template_with_image() -> str: with open(PROMOTFLOW_ROOT / "tests/test_configs/prompt_templates/prompt_with_image.jinja2") as f: prompt_template = f.read() return prompt_template @pytest.fixture def example_image() -> Image: with open(PROMOTFLOW_ROOT / "tests/test_configs/prompt_templates/images/number10.jpg", "rb") as f: image = Image(f.read()) return image # functions @pytest.fixture def functions(): return [ { "name": "get_current_weather", "parameters": { "type": "object", "properties": {}, }, } ] @pytest.fixture def azure_content_safety_connection(): return ConnectionManager().get("azure_content_safety_connection")
0
promptflow_repo/promptflow/src/promptflow-tools
promptflow_repo/promptflow/src/promptflow-tools/tests/pytest.ini
[pytest] markers = skip_if_no_api_key: skip the test if actual api key is not provided.
0
promptflow_repo/promptflow/src/promptflow-tools
promptflow_repo/promptflow/src/promptflow-tools/tests/test_open_model_llm.py
import copy import os import pytest import re from azure.identity import DefaultAzureCredential from typing import List, Dict from promptflow.tools.exception import ( OpenModelLLMUserError, OpenModelLLMKeyValidationError ) from promptflow.tools.open_model_llm import ( OpenModelLLM, API, ContentFormatterBase, LlamaContentFormatter, list_endpoint_names, list_deployment_names, CustomConnectionsContainer, get_model_type, ModelFamily, ServerlessEndpointsContainer ) def validate_response(response): assert len(response) > 15 def verify_prompt_role_delimiters(message: str, codes: List[str]): assert codes == "UserError/OpenModelLLMUserError".split("/") message_pattern = re.compile( r"The Chat API requires a specific format for prompt definition, and the prompt should include separate " + r"lines as role delimiters: ('(assistant|user|system):\\n'[,.]){3} Current parsed role 'the quick brown" + r" fox' does not meet the requirement. If you intend to use the " + r"Completion API, please select the appropriate API type and deployment name. If you do intend to use the " + r"Chat API, please refer to the guideline at https://aka.ms/pfdoc/chat-prompt or view the samples in our " + r"gallery that contain 'Chat' in the name.") is_match = message_pattern.match(message) assert is_match @pytest.fixture def verify_service_endpoints(open_model_llm_ws_service_connection) -> Dict[str, List[str]]: if not open_model_llm_ws_service_connection: pytest.skip("Service Credential not available") print("open_model_llm_ws_service_connection completed") required_env_vars = ["AZUREML_ARM_SUBSCRIPTION", "AZUREML_ARM_RESOURCEGROUP", "AZUREML_ARM_WORKSPACE_NAME", "AZURE_CLIENT_ID", "AZURE_TENANT_ID", "AZURE_CLIENT_SECRET"] for rev in required_env_vars: if rev not in os.environ: raise Exception(f"test not setup correctly. 
Missing Required Environment Variable:{rev}") @pytest.fixture def endpoints_provider(verify_service_endpoints) -> Dict[str, List[str]]: from azure.ai.ml import MLClient credential = DefaultAzureCredential(exclude_interactive_browser_credential=False) ml_client = MLClient( credential=credential, subscription_id=os.getenv("AZUREML_ARM_SUBSCRIPTION"), resource_group_name=os.getenv("AZUREML_ARM_RESOURCEGROUP"), workspace_name=os.getenv("AZUREML_ARM_WORKSPACE_NAME")) endpoints = {} for ep in ml_client.online_endpoints.list(): endpoints[ep.name] = [d.name for d in ml_client.online_deployments.list(ep.name)] return endpoints @pytest.fixture def chat_endpoints_provider(endpoints_provider: Dict[str, List[str]]) -> Dict[str, List[str]]: chat_endpoint_names = ["gpt2", "llama-chat"] chat_endpoints = {} for key, value in endpoints_provider.items(): for ep_name in chat_endpoint_names: if ep_name in key: chat_endpoints[key] = value if len(chat_endpoints) <= 0: pytest.skip("No Chat Endpoints Found") return chat_endpoints @pytest.fixture def completion_endpoints_provider(endpoints_provider: Dict[str, List[str]]) -> Dict[str, List[str]]: completion_endpoint_names = ["gpt2", "llama-comp"] completion_endpoints = {} for key, value in endpoints_provider.items(): for ep_name in completion_endpoint_names: if ep_name in key: completion_endpoints[key] = value if len(completion_endpoints) <= 0: pytest.skip("No Completion Endpoints Found") return completion_endpoints @pytest.mark.usefixtures("use_secrets_config_file") class TestOpenModelLLM: stateless_os_llm = OpenModelLLM() gpt2_connection = "connection/gpt2_connection" llama_connection = "connection/llama_chat_connection" llama_serverless_connection = "connection/llama_chat_serverless" completion_prompt = "The quick brown fox" chat_prompt = """system: * You are a AI which helps Customers complete a sentence. * Your answer should complete the provided prompt. * Your answer should be followed by a discussion of the meaning. 
* The discussion part of your answer must be long and detailed. user: """ + completion_prompt def test_open_model_llm_completion(self, verify_service_endpoints): response = self.stateless_os_llm.call( self.completion_prompt, API.COMPLETION, endpoint_name=self.gpt2_connection) validate_response(response) def test_open_model_llm_completion_with_deploy(self, verify_service_endpoints): response = self.stateless_os_llm.call( self.completion_prompt, API.COMPLETION, endpoint_name=self.gpt2_connection, deployment_name="gpt2-10") validate_response(response) def test_open_model_llm_chat(self, verify_service_endpoints): response = self.stateless_os_llm.call( self.chat_prompt, API.CHAT, endpoint_name=self.gpt2_connection) validate_response(response) def test_open_model_llm_chat_with_deploy(self, verify_service_endpoints): response = self.stateless_os_llm.call( self.chat_prompt, API.CHAT, endpoint_name=self.gpt2_connection, deployment_name="gpt2-10") validate_response(response) def test_open_model_llm_chat_with_max_length(self, verify_service_endpoints): response = self.stateless_os_llm.call( self.chat_prompt, API.CHAT, endpoint_name=self.gpt2_connection, max_new_tokens=30) # GPT-2 doesn't take this parameter validate_response(response) @pytest.mark.skip_if_no_api_key("gpt2_custom_connection") def test_open_model_llm_con_url_chat(self, gpt2_custom_connection): tmp = copy.deepcopy(gpt2_custom_connection) del tmp.configs['endpoint_url'] with pytest.raises(OpenModelLLMKeyValidationError) as exc_info: customConnectionsContainer = CustomConnectionsContainer() customConnectionsContainer.get_endpoint_from_custom_connection(connection=tmp) assert exc_info.value.message == """Required key `endpoint_url` not found in given custom connection. 
Required keys are: endpoint_url,model_family.""" assert exc_info.value.error_codes == "UserError/ToolValidationError/OpenModelLLMKeyValidationError".split("/") @pytest.mark.skip_if_no_api_key("gpt2_custom_connection") def test_open_model_llm_con_key_chat(self, gpt2_custom_connection): tmp = copy.deepcopy(gpt2_custom_connection) del tmp.secrets['endpoint_api_key'] with pytest.raises(OpenModelLLMKeyValidationError) as exc_info: customConnectionsContainer = CustomConnectionsContainer() customConnectionsContainer.get_endpoint_from_custom_connection(connection=tmp) assert exc_info.value.message == ( "Required secret key `endpoint_api_key` " + """not found in given custom connection. Required keys are: endpoint_api_key.""") assert exc_info.value.error_codes == "UserError/ToolValidationError/OpenModelLLMKeyValidationError".split("/") @pytest.mark.skip_if_no_api_key("gpt2_custom_connection") def test_open_model_llm_con_model_chat(self, gpt2_custom_connection): tmp = copy.deepcopy(gpt2_custom_connection) del tmp.configs['model_family'] with pytest.raises(OpenModelLLMKeyValidationError) as exc_info: customConnectionsContainer = CustomConnectionsContainer() customConnectionsContainer.get_endpoint_from_custom_connection(connection=tmp) assert exc_info.value.message == """Required key `model_family` not found in given custom connection. 
Required keys are: endpoint_url,model_family.""" assert exc_info.value.error_codes == "UserError/ToolValidationError/OpenModelLLMKeyValidationError".split("/") def test_open_model_llm_escape_chat(self): danger = r"The quick \brown fox\tjumped\\over \the \\boy\r\n" out_of_danger = ContentFormatterBase.escape_special_characters(danger) assert out_of_danger == "The quick \\brown fox\\tjumped\\\\over \\the \\\\boy\\r\\n" def test_open_model_llm_llama_parse_chat_with_chat(self): LlamaContentFormatter.parse_chat(self.chat_prompt) def test_open_model_llm_llama_parse_multi_turn(self): multi_turn_chat = """user: You are a AI which helps Customers answer questions. What is the best movie of all time? assistant: Mobius, which starred Jared Leto user: Why was that the greatest movie of all time? """ LlamaContentFormatter.parse_chat(multi_turn_chat) def test_open_model_llm_llama_parse_ignore_whitespace(self): bad_chat_prompt = f"""system: You are a AI which helps Customers answer questions. user: user: {self.completion_prompt}""" with pytest.raises(OpenModelLLMUserError) as exc_info: LlamaContentFormatter.parse_chat(bad_chat_prompt) verify_prompt_role_delimiters(exc_info.value.message, exc_info.value.error_codes) def test_open_model_llm_llama_parse_chat_with_comp(self): with pytest.raises(OpenModelLLMUserError) as exc_info: LlamaContentFormatter.parse_chat(self.completion_prompt) verify_prompt_role_delimiters(exc_info.value.message, exc_info.value.error_codes) def test_open_model_llm_chat_endpoint_name(self, chat_endpoints_provider): for endpoint_name in chat_endpoints_provider: response = self.stateless_os_llm.call( self.chat_prompt, API.CHAT, endpoint_name=f"onlineEndpoint/{endpoint_name}") validate_response(response) def test_open_model_llm_chat_endpoint_name_with_deployment(self, chat_endpoints_provider): for endpoint_name in chat_endpoints_provider: for deployment_name in chat_endpoints_provider[endpoint_name]: response = self.stateless_os_llm.call( self.chat_prompt, 
API.CHAT, endpoint_name=f"onlineEndpoint/{endpoint_name}", deployment_name=deployment_name) validate_response(response) def test_open_model_llm_completion_endpoint_name(self, completion_endpoints_provider): for endpoint_name in completion_endpoints_provider: response = self.stateless_os_llm.call( self.completion_prompt, API.COMPLETION, endpoint_name=f"onlineEndpoint/{endpoint_name}") validate_response(response) def test_open_model_llm_completion_endpoint_name_with_deployment(self, completion_endpoints_provider): for endpoint_name in completion_endpoints_provider: for deployment_name in completion_endpoints_provider[endpoint_name]: response = self.stateless_os_llm.call( self.completion_prompt, API.COMPLETION, endpoint_name=f"onlineEndpoint/{endpoint_name}", deployment_name=deployment_name) validate_response(response) def test_open_model_llm_llama_chat(self, verify_service_endpoints): response = self.stateless_os_llm.call(self.chat_prompt, API.CHAT, endpoint_name=self.llama_connection) validate_response(response) def test_open_model_llm_llama_serverless(self, verify_service_endpoints): response = self.stateless_os_llm.call( self.chat_prompt, API.CHAT, endpoint_name=self.llama_serverless_connection) validate_response(response) def test_open_model_llm_llama_chat_history(self, verify_service_endpoints): chat_history_prompt = """system: * Given the following conversation history and the users next question, answer the next question. * If the conversation is irrelevant or empty, acknowledge and ask for more input. * Do not add more details than necessary to the question. chat history: {% for item in chat_history %} user: {{ item.inputs.chat_input }} assistant: {{ item.outputs.chat_output }} {% endfor %} user: {{ chat_input }}""" response = self.stateless_os_llm.call( chat_history_prompt, API.CHAT, endpoint_name=self.llama_connection, chat_history=[ { "inputs": { "chat_input": "Hi" }, "outputs": { "chat_output": "Hello! How can I assist you today?" 
} }, { "inputs": { "chat_input": "What is Azure compute instance?" }, "outputs": { "chat_output": "An Azure Machine Learning compute instance is a fully managed cloud-based" + " workstation for data scientists. It provides a pre-configured and managed development" + " environment in the cloud for machine learning. Compute instances can also be used as a" + " compute target for training and inferencing for development and testing purposes. They" + " have a job queue, run jobs securely in a virtual network environment, and can run" + " multiple small jobs in parallel. Additionally, compute instances support single-node" + " multi-GPU distributed training jobs." } } ], chat_input="Sorry I didn't follow, could you say that again?") validate_response(response) def test_open_model_llm_dynamic_list_ignore_deployment(self, verify_service_endpoints): deployments = list_deployment_names( subscription_id=os.getenv("AZUREML_ARM_SUBSCRIPTION"), resource_group_name=os.getenv("AZUREML_ARM_RESOURCEGROUP"), workspace_name=os.getenv("AZUREML_ARM_WORKSPACE_NAME"), endpoint=None) assert len(deployments) == 1 assert deployments[0]['value'] == 'default' deployments = list_deployment_names( subscription_id=os.getenv("AZUREML_ARM_SUBSCRIPTION"), resource_group_name=os.getenv("AZUREML_ARM_RESOURCEGROUP"), workspace_name=os.getenv("AZUREML_ARM_WORKSPACE_NAME"), endpoint='') assert len(deployments) == 1 assert deployments[0]['value'] == 'default' deployments = list_deployment_names( subscription_id=os.getenv("AZUREML_ARM_SUBSCRIPTION"), resource_group_name=os.getenv("AZUREML_ARM_RESOURCEGROUP"), workspace_name=os.getenv("AZUREML_ARM_WORKSPACE_NAME"), endpoint='fake_endpoint name') assert len(deployments) == 1 assert deployments[0]['value'] == 'default' def test_open_model_llm_dynamic_list_serverless_test(self, verify_service_endpoints): subscription_id = os.getenv("AZUREML_ARM_SUBSCRIPTION") resource_group_name = os.getenv("AZUREML_ARM_RESOURCEGROUP") workspace_name = 
os.getenv("AZUREML_ARM_WORKSPACE_NAME") se_container = ServerlessEndpointsContainer() credential = DefaultAzureCredential(exclude_interactive_browser_credential=False) token = credential.get_token("https://management.azure.com/.default").token eps = se_container.list_serverless_endpoints( token, subscription_id, resource_group_name, workspace_name) if len(eps) == 0: pytest.skip("Service Credential not available") endpoint_connection_name = eps[0]["value"].replace("serverlessEndpoint/", "") eps_keys = se_container._list_endpoint_key( token, subscription_id, resource_group_name, workspace_name, endpoint_connection_name ) assert len(eps_keys) == 2 (endpoint_url, endpoint_key, model_family) = se_container.get_serverless_endpoint_key( token, subscription_id, resource_group_name, workspace_name, endpoint_connection_name) assert len(endpoint_url) > 20 assert model_family == "LLaMa" assert endpoint_key == eps_keys['primaryKey'] def test_open_model_llm_dynamic_list_custom_connections_test(self, verify_service_endpoints): custom_container = CustomConnectionsContainer() credential = DefaultAzureCredential(exclude_interactive_browser_credential=False) connections = custom_container.list_custom_connection_names( credential, subscription_id=os.getenv("AZUREML_ARM_SUBSCRIPTION"), resource_group_name=os.getenv("AZUREML_ARM_RESOURCEGROUP"), workspace_name=os.getenv("AZUREML_ARM_WORKSPACE_NAME")) assert len(connections) > 1 def test_open_model_llm_dynamic_list_happy_path(self, verify_service_endpoints): endpoints = list_endpoint_names( subscription_id=os.getenv("AZUREML_ARM_SUBSCRIPTION"), resource_group_name=os.getenv("AZUREML_ARM_RESOURCEGROUP"), workspace_name=os.getenv("AZUREML_ARM_WORKSPACE_NAME"), return_endpoint_url=True ) # we might want to remove this or skip if there are zero endpoints in the long term. # currently we have low cost compute for a GPT2 endpoint, so if nothing else this should be available. 
assert len(endpoints) > 0 for endpoint in endpoints: assert "value" in endpoint assert "display_value" in endpoint assert "description" in endpoint from tests.utils import verify_url_exists for endpoint in endpoints: if "localConnection/" in endpoint['value'] or not verify_url_exists(endpoint["url"]): continue is_chat = "serverless" in endpoint['value'] or "chat" in endpoint['value'] if is_chat: prompt = self.chat_prompt api_type = API.CHAT else: prompt = self.completion_prompt api_type = API.COMPLETION # test with default endpoint response = self.stateless_os_llm.call( prompt, api_type, endpoint_name=endpoint['value'], max_new_tokens=30, model_kwargs={}) validate_response(response) deployments = list_deployment_names( subscription_id=os.getenv("AZUREML_ARM_SUBSCRIPTION"), resource_group_name=os.getenv("AZUREML_ARM_RESOURCEGROUP"), workspace_name=os.getenv("AZUREML_ARM_WORKSPACE_NAME"), endpoint=endpoint['value']) if "onlineEndpoint" in endpoint['value']: assert len(deployments) > 0 else: assert len(deployments) == 1 assert deployments[0]['value'] == 'default' continue for deployment in deployments: response = self.stateless_os_llm.call( prompt, api_type, endpoint_name=endpoint['value'], deployment_name=deployment['value'], max_new_tokens=30, model_kwargs={}) validate_response(response) def test_open_model_llm_get_model_llama(self): model_assets = [ "azureml://registries/azureml-meta/models/Llama-2-7b-chat/versions/14", "azureml://registries/azureml-meta/models/Llama-2-7b/versions/12", "azureml://registries/azureml-meta/models/Llama-2-13b-chat/versions/12", "azureml://registries/azureml-meta/models/Llama-2-13b/versions/12", "azureml://registries/azureml-meta/models/Llama-2-70b-chat/versions/12", "azureml://registries/azureml-meta/models/Llama-2-70b/versions/13" ] for asset_name in model_assets: assert ModelFamily.LLAMA == get_model_type(asset_name) def test_open_model_llm_get_model_gpt2(self): model_assets = [ 
"azureml://registries/azureml-staging/models/gpt2/versions/9", "azureml://registries/azureml/models/gpt2/versions/9", "azureml://registries/azureml/models/gpt2-medium/versions/11", "azureml://registries/azureml/models/gpt2-large/versions/11" ] for asset_name in model_assets: assert ModelFamily.GPT2 == get_model_type(asset_name) def test_open_model_llm_get_model_dolly(self): model_assets = [ "azureml://registries/azureml/models/databricks-dolly-v2-12b/versions/11" ] for asset_name in model_assets: assert ModelFamily.DOLLY == get_model_type(asset_name) def test_open_model_llm_get_model_falcon(self): model_assets = [ "azureml://registries/azureml/models/tiiuae-falcon-40b/versions/2", "azureml://registries/azureml/models/tiiuae-falcon-40b/versions/2" ] for asset_name in model_assets: assert ModelFamily.FALCON == get_model_type(asset_name) def test_open_model_llm_get_model_failure_cases(self): bad_model_assets = [ "azureml://registries/azureml-meta/models/CodeLlama-7b-Instruct-hf/versions/3", "azureml://registries/azureml-staging/models/gpt-2/versions/9", "azureml://registries/azureml/models/falcon-40b/versions/2", "azureml://registries/azureml-meta/models/Llama-70b/versions/13", "azureml://registries/azureml/models/openai-whisper-large/versions/14", "azureml://registries/azureml/models/ask-wikipedia/versions/2", "definitely not real", "", "ausreml://registries/azureml/models/ask-wikipedia/versions/2", "azureml://registries/azureml/models/ask-wikipedia/version/2", "azureml://registries/azureml/models/ask-wikipedia/version/" ] for asset_name in bad_model_assets: val = get_model_type(asset_name) assert val is None def test_open_model_llm_local_connection(self, verify_service_endpoints, gpt2_custom_connection): endpoints = list_endpoint_names( subscription_id=os.getenv("AZUREML_ARM_SUBSCRIPTION"), resource_group_name=os.getenv("AZUREML_ARM_RESOURCEGROUP"), workspace_name=os.getenv("AZUREML_ARM_WORKSPACE_NAME"), return_endpoint_url=True ) import uuid connection_name = 
f"test_local_connection_{uuid.uuid4()}" for e in endpoints: assert e['value'] != connection_name from promptflow._sdk.entities import CustomConnection connection = CustomConnection(name=connection_name, configs={ "endpoint_url": gpt2_custom_connection.configs['endpoint_url'], "model_family": gpt2_custom_connection.configs['model_family']}, secrets={ "endpoint_api_key": gpt2_custom_connection.secrets['endpoint_api_key']}) from promptflow import PFClient as LocalPFClient pf_client = LocalPFClient() pf_client.connections.create_or_update(connection) endpoints = list_endpoint_names( subscription_id=os.getenv("AZUREML_ARM_SUBSCRIPTION"), resource_group_name=os.getenv("AZUREML_ARM_RESOURCEGROUP"), workspace_name=os.getenv("AZUREML_ARM_WORKSPACE_NAME"), force_refresh=True ) found = False target_connection_name = f"localConnection/{connection_name}" for e in endpoints: if e['value'] == target_connection_name: found = True break assert found response = self.stateless_os_llm.call( self.completion_prompt, API.COMPLETION, endpoint_name=target_connection_name) validate_response(response) def test_open_model_llm_package(self): import pkg_resources # Promptflow-tools is not installed in the test pipeline, so we'll skip this test there. Works locally. try: pkg_resources.get_distribution("promptflow-tools") except pkg_resources.DistributionNotFound: pytest.skip("promptflow-tools not installed") found = False target_tool_identifier = "promptflow.tools.open_model_llm.OpenModelLLM.call" for entry_point in pkg_resources.iter_entry_points(group="package_tools"): list_tool_func = entry_point.resolve() package_tools = list_tool_func() for identifier, tool in package_tools.items(): if identifier == target_tool_identifier: import importlib importlib.import_module(tool["module"]) # Import the module to ensure its validity assert not found found = True assert found
0
promptflow_repo/promptflow/src/promptflow-tools
promptflow_repo/promptflow/src/promptflow-tools/tests/test_common.py
import pytest from promptflow.contracts.multimedia import Image from promptflow.tools.common import ChatAPIInvalidFunctions, validate_functions, process_function_call, \ parse_chat, find_referenced_image_set, preprocess_template_string, convert_to_chat_list, ChatInputList class TestCommon: @pytest.mark.parametrize( "functions, error_message", [ ([], "functions cannot be an empty list"), (["str"], "is not a dict. Here is a valid function example"), ([{"name": "func1"}], "does not have 'parameters' property"), ([{"name": "func1", "parameters": "param1"}], "should be described as a JSON Schema object"), ([{"name": "func1", "parameters": {"type": "int", "properties": {}}}], "parameters 'type' should be 'object'"), ([{"name": "func1", "parameters": {"type": "object", "properties": []}}], "should be described as a JSON Schema object"), ], ) def test_chat_api_invalid_functions(self, functions, error_message): error_codes = "UserError/ToolValidationError/ChatAPIInvalidFunctions" with pytest.raises(ChatAPIInvalidFunctions) as exc_info: validate_functions(functions) assert error_message in exc_info.value.message assert exc_info.value.error_codes == error_codes.split("/") @pytest.mark.parametrize( "function_call, error_message", [ ("123", "function_call parameter '123' must be a dict"), ({"name1": "get_current_weather"}, 'function_call parameter {"name1": "get_current_weather"} must ' 'contain "name" field'), ], ) def test_chat_api_invalid_function_call(self, function_call, error_message): error_codes = "UserError/ToolValidationError/ChatAPIInvalidFunctions" with pytest.raises(ChatAPIInvalidFunctions) as exc_info: process_function_call(function_call) assert error_message in exc_info.value.message assert exc_info.value.error_codes == error_codes.split("/") @pytest.mark.parametrize( "chat_str, images, expected_result", [ ("system:\nthis is my function:\ndef hello", None, [ {'role': 'system', 'content': 'this is my function:\ndef hello'}]), ("#system:\nthis is my 
##function:\ndef hello", None, [ {'role': 'system', 'content': 'this is my ##function:\ndef hello'}]), (" \n system:\nthis is my function:\ndef hello", None, [ {'role': 'system', 'content': 'this is my function:\ndef hello'}]), (" \n # system:\nthis is my function:\ndef hello", None, [ {'role': 'system', 'content': 'this is my function:\ndef hello'}]), ("user:\nhi\nassistant:\nanswer\nfunction:\nname:\nn\ncontent:\nc", None, [ {'role': 'user', 'content': 'hi'}, {'role': 'assistant', 'content': 'answer'}, {'role': 'function', 'name': 'n', 'content': 'c'}]), ("#user :\nhi\n #assistant:\nanswer\n# function:\n##name:\nn\n##content:\nc", None, [ {'role': 'user', 'content': 'hi'}, {'role': 'assistant', 'content': 'answer'}, {'role': 'function', 'name': 'n', 'content': 'c'}]), ("\nsystem:\nfirst\n\nsystem:\nsecond", None, [ {'role': 'system', 'content': 'first'}, {'role': 'system', 'content': 'second'}]), ("\n#system:\nfirst\n\n#system:\nsecond", None, [ {'role': 'system', 'content': 'first'}, {'role': 'system', 'content': 'second'}]), ("\n#system:\nfirst\n#assistant:\n#user:\nsecond", None, [ {'role': 'system', 'content': 'first'}, {'role': 'assistant', 'content': ''}, {'role': 'user', 'content': 'second'} ]), # todo: enable this test case after we support image_url officially # ("#user:\ntell me about the images\nImage(1edf82c2)\nImage(9b65b0f4)", [ # Image("image1".encode()), Image("image2".encode(), "image/png", "https://image_url")], [ # {'role': 'user', 'content': [ # {'type': 'text', 'text': 'tell me about the images'}, # {'type': 'image_url', 'image_url': {'url': 'data:image/*;base64,aW1hZ2Ux'}}, # {'type': 'image_url', 'image_url': 'https://image_url'}]}, # ]) ] ) def test_success_parse_role_prompt(self, chat_str, images, expected_result): actual_result = parse_chat(chat_str, images) assert actual_result == expected_result @pytest.mark.parametrize( "chat_str, expected_result", [ ("\n#system:\n##name:\nAI \n content:\nfirst\n\n#user:\nsecond", [ {'role': 'system', 
'name': 'AI', 'content': 'first'}, {'role': 'user', 'content': 'second'}]), ("\nuser:\nname:\n\nperson\n content:\n", [ {'role': 'user', 'name': 'person', 'content': ''}]), ("\nsystem:\nname:\n\n content:\nfirst", [ {'role': 'system', 'content': 'name:\n\n content:\nfirst'}]), ("\nsystem:\nname:\n\n", [ {'role': 'system', 'content': 'name:'}]) ] ) def test_parse_chat_with_name_in_role_prompt(self, chat_str, expected_result): actual_result = parse_chat(chat_str) assert actual_result == expected_result @pytest.mark.parametrize( "kwargs, expected_result", [ ({}, set()), ({"image_1": Image("image1".encode()), "image_2": Image("image2".encode()), "t1": "text"}, { Image("image1".encode()), Image("image2".encode()) }), ({"images": [Image("image1".encode()), Image("image2".encode())]}, { Image("image1".encode()), Image("image2".encode()) }), ({"image_1": Image("image1".encode()), "image_2": Image("image1".encode())}, { Image("image1".encode()) }), ({"images": {"image_1": Image("image1".encode()), "image_2": Image("image2".encode())}}, { Image("image1".encode()), Image("image2".encode()) }) ] ) def test_find_referenced_image_set(self, kwargs, expected_result): actual_result = find_referenced_image_set(kwargs) assert actual_result == expected_result @pytest.mark.parametrize( "input_string, expected_output", [ ("![image]({{img1}})", "\n{{img1}}\n"), ("![image]({{img1}})![image]({{img2}})", "\n{{img1}}\n\n{{img2}}\n"), ("No image here", "No image here"), ("![image]({{img1}}) Some text ![image]({{img2}})", "\n{{img1}}\n Some text \n{{img2}}\n"), ], ) def test_preprocess_template_string(self, input_string, expected_output): actual_result = preprocess_template_string(input_string) assert actual_result == expected_output @pytest.mark.parametrize( "input_data, expected_output", [ ({}, {}), ({"key": "value"}, {"key": "value"}), (["item1", "item2"], ChatInputList(["item1", "item2"])), ({"key": ["item1", "item2"]}, {"key": ChatInputList(["item1", "item2"])}), (["item1", 
["nested_item1", "nested_item2"]], ChatInputList(["item1", ChatInputList(["nested_item1", "nested_item2"])])), ], ) def test_convert_to_chat_list(self, input_data, expected_output): actual_result = convert_to_chat_list(input_data) assert actual_result == expected_output
0
promptflow_repo/promptflow/src/promptflow-tools
promptflow_repo/promptflow/src/promptflow-tools/tests/test_openai.py
import pytest import json from promptflow.tools.openai import chat, completion, OpenAI from promptflow.tools.exception import WrappedOpenAIError @pytest.fixture def openai_provider(open_ai_connection) -> OpenAI: return OpenAI(open_ai_connection) @pytest.mark.usefixtures("use_secrets_config_file") @pytest.mark.skip_if_no_api_key("open_ai_connection") class TestOpenAI: def test_openai_completion(self, openai_provider): prompt_template = "please complete this sentence: world war II " openai_provider.completion(prompt=prompt_template) def test_openai_stream_completion(self, openai_provider): prompt_template = "please complete this sentence: world war II " openai_provider.completion(prompt=prompt_template, stream=True) def test_openai_completion_api(self, open_ai_connection): prompt_template = "please complete this sentence: world war II " completion(open_ai_connection, prompt=prompt_template) def test_openai_chat(self, openai_provider, example_prompt_template, chat_history): result = openai_provider.chat( prompt=example_prompt_template, model="gpt-3.5-turbo", max_tokens=32, temperature=0, user_input="Fill in more details about trend 2.", chat_history=chat_history, ) assert "trend 2" in result.lower() def test_openai_stream_chat(self, openai_provider, example_prompt_template, chat_history): result = openai_provider.chat( prompt=example_prompt_template, model="gpt-3.5-turbo", max_tokens=32, temperature=0, user_input="Fill in more details about trend 2.", chat_history=chat_history, stream=True, ) answer = "" while True: try: answer += next(result) except Exception: break assert "trend 2" in answer.lower() def test_openai_chat_api(self, open_ai_connection, example_prompt_template, chat_history): result = chat( connection=open_ai_connection, prompt=example_prompt_template, model="gpt-3.5-turbo", max_tokens="inF", temperature=0, user_input="Write a slogan for product X", chat_history=chat_history, ) assert "Product X".lower() in result.lower() def 
test_openai_prompt_with_function( self, open_ai_connection, example_prompt_template_with_function, functions): result = chat( connection=open_ai_connection, prompt=example_prompt_template_with_function, model="gpt-3.5-turbo", temperature=0, # test input functions. functions=functions, # test input prompt containing function role. name="get_location", result=json.dumps({"location": "Austin"}), question="What is the weather in Boston?", prev_question="Where is Boston?" ) assert result["function_call"]["name"] == "get_current_weather" def test_openai_chat_with_response_format(self, open_ai_connection, example_prompt_template, chat_history): result = chat( connection=open_ai_connection, prompt=example_prompt_template, model="gpt-4-1106-preview", temperature=0, user_input="Write a slogan for product X, please reponse with json.", chat_history=chat_history, response_format={"type": "json_object"} ) assert "Product X".lower() in result.lower() @pytest.mark.parametrize( "response_format, user_input, error_message, error_codes, exception", [ ({"type": "json"}, "Write a slogan for product X, please reponse with json.", "\'json\' is not one of [\'json_object\', \'text\']", "UserError/OpenAIError/BadRequestError", WrappedOpenAIError), ({"type": "json_object"}, "Write a slogan for product X", "\'messages\' must contain the word \'json\' in some form", "UserError/OpenAIError/BadRequestError", WrappedOpenAIError), ({"types": "json_object"}, "Write a slogan for product X", "The response_format parameter needs to be a dictionary such as {\"type\": \"text\"}", "UserError/OpenAIError/BadRequestError", WrappedOpenAIError) ] ) def test_openai_chat_with_invalid_response_format( self, open_ai_connection, example_prompt_template, chat_history, response_format, user_input, error_message, error_codes, exception ): with pytest.raises(exception) as exc_info: chat( connection=open_ai_connection, prompt=example_prompt_template, model="gpt-4-1106-preview", temperature=0, user_input=user_input, 
chat_history=chat_history, response_format=response_format ) assert error_message in exc_info.value.message assert exc_info.value.error_codes == error_codes.split("/") def test_openai_chat_with_not_support_response_format_json_mode_model( self, open_ai_connection, example_prompt_template, chat_history ): with pytest.raises(WrappedOpenAIError) as exc_info: chat( connection=open_ai_connection, prompt=example_prompt_template, model="gpt-3.5-turbo", temperature=0, user_input="Write a slogan for product X, please reponse with json.", chat_history=chat_history, response_format={"type": "json_object"} ) error_message = "The response_format parameter needs to be a dictionary such as {\"type\": \"text\"}." assert error_message in exc_info.value.message assert exc_info.value.error_codes == "UserError/OpenAIError/BadRequestError".split("/") def test_openai_chat_with_response_format_text_mode( self, open_ai_connection, example_prompt_template, chat_history ): result = chat( connection=open_ai_connection, prompt=example_prompt_template, model="gpt-3.5-turbo", temperature=0, user_input="Write a slogan for product X.", chat_history=chat_history, response_format={"type": "text"} ) assert "Product X".lower() in result.lower()
0
promptflow_repo/promptflow/src/promptflow-tools
promptflow_repo/promptflow/src/promptflow-tools/tests/test_acs.py
import pytest

from promptflow.tools.azure_content_safety import analyze_text


@pytest.mark.usefixtures("use_secrets_config_file")
class TestAzureContentSafety:
    def test_azure_content_safety_analyze_happy_path(self, azure_content_safety_connection):
        # Analyze a simple hostile sentence and verify the result exposes the
        # expected top-level fields.
        result = analyze_text(connection=azure_content_safety_connection, text="I hate you.")
        for expected_key in ("suggested_action", "action_by_category"):
            assert expected_key in result
0
promptflow_repo/promptflow/src/promptflow-tools
promptflow_repo/promptflow/src/promptflow-tools/tests/test_handle_openai_error.py
import httpx import pytest from jinja2.exceptions import TemplateSyntaxError from openai import ( APIConnectionError, RateLimitError, AuthenticationError, BadRequestError, APITimeoutError, InternalServerError, UnprocessableEntityError ) from promptflow.tools.aoai import chat, completion from promptflow.tools.common import handle_openai_error from promptflow.tools.exception import ChatAPIInvalidRole, WrappedOpenAIError, to_openai_error_message, \ JinjaTemplateError, LLMError, ChatAPIFunctionRoleInvalidFormat from promptflow.tools.openai import chat as openai_chat from promptflow.tools.aoai_gpt4v import AzureOpenAI as AzureOpenAIVision from pytest_mock import MockerFixture from promptflow.exceptions import UserErrorException @pytest.mark.usefixtures("use_secrets_config_file") class TestHandleOpenAIError: def test_aoai_chat_message_invalid_format(self, aoai_provider): # chat api prompt should follow the format of "system:\nmessage1\nuser:\nmessage2". prompt = "what is your name" error_codes = "UserError/ToolValidationError/ChatAPIInvalidRole" with pytest.raises(ChatAPIInvalidRole, match="The Chat API requires a specific format for prompt") as exc_info: aoai_provider.chat(prompt=prompt, deployment_name="gpt-35-turbo") assert exc_info.value.error_codes == error_codes.split("/") def test_aoai_authentication_error_with_bad_api_key(self, azure_open_ai_connection): azure_open_ai_connection.api_key = "hello" prompt_template = "please complete this sentence: world war II " raw_message = ( "Unauthorized. 
Access token is missing, invalid" ) error_codes = "UserError/OpenAIError/AuthenticationError" with pytest.raises(WrappedOpenAIError) as exc_info: chat(azure_open_ai_connection, prompt=f"user:\n{prompt_template}", deployment_name="gpt-35-turbo") assert raw_message in exc_info.value.message assert exc_info.value.error_codes == error_codes.split("/") def test_aoai_connection_error_with_bad_api_base(self, azure_open_ai_connection): azure_open_ai_connection.api_base = "https://gpt-test-eus11.openai.azure.com/" prompt_template = "please complete this sentence: world war II " error_codes = "UserError/OpenAIError/APIConnectionError" with pytest.raises(WrappedOpenAIError) as exc_info: chat(azure_open_ai_connection, prompt=f"user:\n{prompt_template}", deployment_name="gpt-35-turbo") assert "Connection error." in exc_info.value.message assert exc_info.value.error_codes == error_codes.split("/") def test_aoai_not_found_error_with_bad_api_version(self, azure_open_ai_connection): """NotFoundError: Resource not found""" azure_open_ai_connection.api_version = "2022-12-23" prompt_template = "please complete this sentence: world war II " raw_message = "Resource not found" error_codes = "UserError/OpenAIError/NotFoundError" # Chat will throw: Exception occurs: NotFoundError: Resource not found with pytest.raises(WrappedOpenAIError) as exc_info: chat(azure_open_ai_connection, prompt=f"user:\n{prompt_template}", deployment_name="gpt-35-turbo") assert raw_message in exc_info.value.message assert exc_info.value.error_codes == error_codes.split("/") def test_aoai_not_found_error_with_bad_deployment(self, aoai_provider): """ NotFoundError: The API deployment for this resource does not exist. If you created the deployment within the last 5 minutes, please wait a moment and try again. """ # This will throw InvalidRequestError prompt_template = "please complete this sentence: world war II " deployment = "hello" raw_message = ( "The API deployment for this resource does not exist. 
If you created the deployment " "within the last 5 minutes, please wait a moment and try again." ) error_codes = "UserError/OpenAIError/NotFoundError" with pytest.raises(WrappedOpenAIError) as exc_info: aoai_provider.chat(prompt=f"user:\n{prompt_template}", deployment_name=deployment) assert raw_message in exc_info.value.message assert exc_info.value.error_codes == error_codes.split("/") def test_rate_limit_error_insufficient_quota(self, azure_open_ai_connection, mocker: MockerFixture): dummyEx = RateLimitError("Something went wrong", response=httpx.Response( 429, request=httpx.Request('GET', 'https://www.example.com')), body={"type": "insufficient_quota"}) mock_method = mocker.patch("openai.resources.Completions.create", side_effect=dummyEx) error_codes = "UserError/OpenAIError/RateLimitError" with pytest.raises(WrappedOpenAIError) as exc_info: completion(connection=azure_open_ai_connection, prompt="hello", deployment_name="text-ada-001") assert to_openai_error_message(dummyEx) == exc_info.value.message assert mock_method.call_count == 1 assert exc_info.value.error_codes == error_codes.split("/") @pytest.mark.parametrize( "dummyExceptionList", [ ( [ RateLimitError("Something went wrong", response=httpx.Response( 429, request=httpx.Request('GET', 'https://www.example.com')), body=None), APITimeoutError(request=httpx.Request('GET', 'https://www.example.com')), APIConnectionError( message="('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))", request=httpx.Request('GET', 'https://www.example.com')), InternalServerError("Something went wrong", response=httpx.Response( 503, request=httpx.Request('GET', 'https://www.example.com')), body=None), UnprocessableEntityError("Something went wrong", response=httpx.Response( 422, request=httpx.Request('GET', 'https://www.example.com')), body=None) ] ), ], ) def test_retriable_openai_error_handle(self, mocker: MockerFixture, dummyExceptionList): for dummyEx in dummyExceptionList: # Patch the test_method 
to throw the desired exception patched_test_method = mocker.patch("openai.resources.Completions.create", side_effect=dummyEx) # Apply the retry decorator to the patched test_method max_retry = 2 delay = 0.2 decorated_test_method = handle_openai_error(tries=max_retry, delay=delay)(patched_test_method) mock_sleep = mocker.patch("time.sleep") # Create a separate mock for time.sleep with pytest.raises(UserErrorException) as exc_info: decorated_test_method() assert patched_test_method.call_count == max_retry + 1 assert "Exceed max retry times. " + to_openai_error_message(dummyEx) == exc_info.value.message error_codes = "UserError/OpenAIError/" + type(dummyEx).__name__ assert exc_info.value.error_codes == error_codes.split("/") expected_calls = [ mocker.call(delay), mocker.call(delay * 2), ] mock_sleep.assert_has_calls(expected_calls) @pytest.mark.parametrize( "dummyExceptionList", [ ( [ RateLimitError("Something went wrong", response=httpx.Response( 429, request=httpx.Request('GET', 'https://www.example.com'), headers={"retry-after": "0.3"}), body=None), InternalServerError("Something went wrong", response=httpx.Response( 503, request=httpx.Request('GET', 'https://www.example.com'), headers={"retry-after": "0.3"}), body=None), UnprocessableEntityError("Something went wrong", response=httpx.Response( 422, request=httpx.Request('GET', 'https://www.example.com'), headers={"retry-after": "0.3"}), body=None) ] ), ], ) def test_retriable_openai_error_handle_with_header( self, mocker: MockerFixture, dummyExceptionList ): for dummyEx in dummyExceptionList: # Patch the test_method to throw the desired exception patched_test_method = mocker.patch("promptflow.tools.aoai.completion", side_effect=dummyEx) # Apply the retry decorator to the patched test_method max_retry = 2 delay = 0.2 header_delay = 0.3 decorated_test_method = handle_openai_error(tries=max_retry, delay=delay)(patched_test_method) mock_sleep = mocker.patch("time.sleep") # Create a separate mock for time.sleep with 
pytest.raises(UserErrorException) as exc_info: decorated_test_method() assert patched_test_method.call_count == max_retry + 1 assert "Exceed max retry times. " + to_openai_error_message(dummyEx) == exc_info.value.message error_codes = "UserError/OpenAIError/" + type(dummyEx).__name__ assert exc_info.value.error_codes == error_codes.split("/") expected_calls = [ mocker.call(header_delay), mocker.call(header_delay * 2), ] mock_sleep.assert_has_calls(expected_calls) @pytest.mark.parametrize( "dummyExceptionList", [ ( [ AuthenticationError("Something went wrong", response=httpx.get('https://www.example.com'), body=None), BadRequestError("Something went wrong", response=httpx.get('https://www.example.com'), body=None), APIConnectionError(message="Something went wrong", request=httpx.Request('GET', 'https://www.example.com')), ] ), ], ) def test_non_retriable_openai_error_handle( self, azure_open_ai_connection, mocker: MockerFixture, dummyExceptionList ): for dummyEx in dummyExceptionList: mock_method = mocker.patch("openai.resources.Completions.create", side_effect=dummyEx) with pytest.raises(UserErrorException) as exc_info: completion(connection=azure_open_ai_connection, prompt="hello", deployment_name="text-ada-001") assert to_openai_error_message(dummyEx) == exc_info.value.message error_codes = "UserError/OpenAIError/" + type(dummyEx).__name__ assert exc_info.value.error_codes == error_codes.split("/") assert mock_method.call_count == 1 def test_unexpected_error_handle(self, azure_open_ai_connection, mocker: MockerFixture): dummyEx = Exception("Something went wrong") chat(connection=azure_open_ai_connection, prompt="user:\nhello", deployment_name="gpt-35-turbo") mock_method = mocker.patch("openai.resources.chat.Completions.create", side_effect=dummyEx) error_codes = "UserError/LLMError" with pytest.raises(LLMError) as exc_info: chat(connection=azure_open_ai_connection, prompt="user:\nhello", deployment_name="gpt-35-turbo") assert to_openai_error_message(dummyEx) != 
exc_info.value.args[0] assert "OpenAI API hits exception: Exception: Something went wrong" == exc_info.value.message assert mock_method.call_count == 1 assert exc_info.value.error_codes == error_codes.split("/") def test_template_syntax_error_handle(self, azure_open_ai_connection, mocker: MockerFixture): dummyEx = TemplateSyntaxError(message="Something went wrong", lineno=1) mock_method = mocker.patch("jinja2.Template.__new__", side_effect=dummyEx) error_codes = "UserError/ToolValidationError/JinjaTemplateError" with pytest.raises(JinjaTemplateError) as exc_info: chat(connection=azure_open_ai_connection, prompt="user:\nhello", deployment_name="gpt-35-turbo") error_message = "Failed to render jinja template: TemplateSyntaxError: Something went wrong\n line 1. " \ + "Please modify your prompt to fix the issue." assert error_message == exc_info.value.message assert mock_method.call_count == 1 assert exc_info.value.error_codes == error_codes.split("/") @pytest.mark.skip_if_no_api_key("open_ai_connection") def test_model_not_accept_functions_as_param( self, open_ai_connection, example_prompt_template, functions): with pytest.raises(WrappedOpenAIError) as exc_info: openai_chat( connection=open_ai_connection, prompt=example_prompt_template, model="gpt-3.5-turbo-0301", functions=functions ) assert "Current model does not support the `functions` parameter" in exc_info.value.message def test_input_invalid_function_role_prompt(self, azure_open_ai_connection): with pytest.raises(ChatAPIFunctionRoleInvalidFormat) as exc_info: chat( connection=azure_open_ai_connection, prompt="function:\n This is function role prompt", deployment_name="gpt-35-turbo" ) assert "'name' is required if role is function," in exc_info.value.message def test_completion_with_chat_model(self, azure_open_ai_connection): with pytest.raises(UserErrorException) as exc_info: completion(connection=azure_open_ai_connection, prompt="hello", deployment_name="gpt-35-turbo") msg = "Completion API is a legacy api and 
is going to be deprecated soon. " \ "Please change to use Chat API for current model." assert msg in exc_info.value.message def test_model_not_support_image_input( self, azure_open_ai_connection, example_prompt_template_with_image, example_image): aoai = AzureOpenAIVision(azure_open_ai_connection) with pytest.raises(WrappedOpenAIError) as exc_info: aoai.chat( prompt=example_prompt_template_with_image, deployment_name="gpt-35-turbo", max_tokens=480, temperature=0, question="which number did you see in this picture?", image_input=example_image, ) assert "Current model does not support the image input" in exc_info.value.message @pytest.mark.parametrize( "max_tokens, error_message, error_codes, exception", [ (0, "0 is less than the minimum of 1", "UserError/OpenAIError/BadRequestError", WrappedOpenAIError), (-1, "-1 is less than the minimum of 1", "UserError/OpenAIError/BadRequestError", WrappedOpenAIError), ("asd", "ValueError: invalid literal for int()", "UserError/LLMError", LLMError) ] ) def test_aoai_invalid_max_tokens( self, azure_open_ai_connection, example_prompt_template, chat_history, max_tokens, error_message, error_codes, exception): with pytest.raises(exception) as exc_info: chat( connection=azure_open_ai_connection, prompt=example_prompt_template, deployment_name="gpt-35-turbo", max_tokens=max_tokens, temperature=0, user_input="Write a slogan for product X", chat_history=chat_history, ) assert error_message in exc_info.value.message assert exc_info.value.error_codes == error_codes.split("/")
0
promptflow_repo/promptflow/src/promptflow-tools/tests/test_configs
promptflow_repo/promptflow/src/promptflow-tools/tests/test_configs/prompt_templates/prompt_with_image.jinja2
# system: As an AI assistant, your task involves interpreting images and responding to questions about the image. Remember to provide accurate answers based on the information present in the image. Directly give the answer, no more explanation is needed. # user: {{question}} ![image]({{image_input}})
0
promptflow_repo/promptflow/src/promptflow-tools/tests/test_configs
promptflow_repo/promptflow/src/promptflow-tools/tests/test_configs/prompt_templates/prompt_with_name_in_roles.jinja2
# system: ## name: AI ## content: Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous. # user: ## name: person ## content: {{prev_question}} # assistant: ## name: John ## content: {{prev_answer}} # function: ## name: {{name}} ## content: {{result}} # user: {{question}}
0
promptflow_repo/promptflow/src/promptflow-tools/tests/test_configs
promptflow_repo/promptflow/src/promptflow-tools/tests/test_configs/prompt_templates/prompt_with_function.jinja2
# system: Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous. # user: {{prev_question}} # assistant: {{prev_answer}} # function: ## name: {{name}} ## content: {{result}} # user: {{question}}
0
promptflow_repo/promptflow/src/promptflow-tools/tests/test_configs/prompt_templates
promptflow_repo/promptflow/src/promptflow-tools/tests/test_configs/prompt_templates/marketing_writer/prompt.jinja2
# System: You are a marketing writing assistant.For user: You help come up with creative content ideas and content like marketing emails, blog posts, tweets, ad copy and product descriptions. You write in a friendly yet professional tone but can tailor your writing style that best works for a user-specified audience. If you do not know the answer to a question, respond by saying "I do not know the answer to your question." {% for item in chat_history %} # user: {{item.inputs.user_input}} # assistant: {{item.outputs.response}} {% endfor %} # user: {{user_input}}
0
promptflow_repo/promptflow/src/promptflow-tools/tests/test_configs/prompt_templates
promptflow_repo/promptflow/src/promptflow-tools/tests/test_configs/prompt_templates/marketing_writer/history.json
[ { "inputs": { "user_input": "hello!" }, "outputs": { "response": "Hello! How can I help you today? If you need any creative content ideas or assistance with marketing emails, blog posts, tweets, ad copy, or product descriptions, feel free to ask. I'm here to help!" } }, { "inputs": { "user_input": "I want to share an idea about latest sports business trend." }, "outputs": { "response": "That sounds interesting! Here's a creative content idea for a blog post or article discussing the latest sports business trends:\n\nTitle: \"Game Changers: The Latest Trends Revolutionizing the Sports Business Industry\"\n\nIntroduction:\n- Briefly discuss the importance of sports as a global business and its ever-changing landscape.\n- Mention that the article will explore the latest sports business trends that are shaping the industry.\n\nTrend 1: Fan Engagement Through Digital and Social Media Platforms\n- Explain how teams and leagues are investing in digital and social media platforms to connect with fans and enhance their experience.\n- Discuss the use of platforms such as Twitter, Facebook, and Instagram to engage fans, sell merchandise, and promote content.\n\nTrend 2: Esports and the Rise of Competitive Gaming\n- Discuss the rapid growth of esports and how it has caught the attention of traditional sports teams, advertisers, and investors.\n- Explain the opportunities esports presents for the sports industry, such as revenue from sponsorships, streaming rights, and live events.\n\nTrend 3: Sports Betting Legalization\n- Examine the impact of the increasing legalization of sports betting on the sports industry.\n- Discuss how teams, leagues, and businesses can capitalize on this trend by providing dedicated betting platforms and partnerships with sportsbooks.\n\nTrend 4: Sustainable Practices in Sports\n- Explore how teams and leagues are becoming more environmentally conscious and incorporating sustainable practices into their operations.\n- Mention examples like using 
renewable energy, reducing waste, and encouraging public transportation.\n\nConclusion:\n- Recap the latest sports business trends that are revolutionizing the industry.\n- Emphasize the need for adaptability and innovation in the sports business world.\n\nFeel free to modify this outline according to your preferences or add more trends if you'd like. Good luck with your content creation!" } } ]
0
promptflow_repo/promptflow/src/promptflow-tools
promptflow_repo/promptflow/src/promptflow-tools/promptflow/version.txt
VERSION = "1.1.0"
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/serpapi.py
import json import sys from enum import Enum import requests # Avoid circular dependencies: Use import 'from promptflow._internal' instead of 'from promptflow' # since the code here is in promptflow namespace as well from promptflow._internal import ToolProvider, tool from promptflow.connections import SerpConnection from promptflow.exceptions import PromptflowException from promptflow.tools.exception import SerpAPIUserError, SerpAPISystemError class SafeMode(str, Enum): ACTIVE = "active" OFF = "off" class Engine(str, Enum): GOOGLE = "google" BING = "bing" class SerpAPI(ToolProvider): def __init__(self, connection: SerpConnection): super().__init__() self.connection = connection def extract_error_message_from_json(self, error_data): error_message = "" # For request was rejected. For example, the api_key is not valid if "error" in error_data: error_message = error_data["error"] return str(error_message) def safe_extract_error_message(self, response): default_error_message = f"SerpAPI search request failed: {response.text}" try: # Keep the same style as SerpAPIClient error_data = json.loads(response.text) print(f"Response text json: {json.dumps(error_data)}", file=sys.stderr) error_message = self.extract_error_message_from_json(error_data) error_message = error_message if len(error_message) > 0 else default_error_message return error_message except Exception as e: # Swallow any exception when extract detailed error message print( f"Unexpected exception occurs while extract error message " f"from response: {type(e).__name__}: {str(e)}", file=sys.stderr, ) return default_error_message # flake8: noqa: C901 @tool def search( self, query: str, # this is required location: str = None, safe: SafeMode = SafeMode.OFF, # Not default to be SafeMode.OFF num: int = 10, engine: Engine = Engine.GOOGLE, # this is required ): from serpapi import SerpApiClient # required parameters. https://serpapi.com/search-api. 
params = { "q": query, "location": location, "api_key": self.connection.api_key, } if isinstance(engine, Engine): params["engine"] = engine.value else: params["engine"] = engine if safe == SafeMode.ACTIVE: # Ingore invalid value and safe="off" (as default) # For bing and google, they use diff parameters if params["engine"].lower() == "google": params["safe"] = "Active" else: params["safeSearch"] = "Strict" if int(num) > 0: # to combine multiple engines together, we use "num" as the parameter for such purpose if params["engine"].lower() == "google": params["num"] = int(num) else: params["count"] = int(num) search = SerpApiClient(params) # get response try: response = search.get_response() if response.status_code == requests.codes.ok: # default output is json return json.loads(response.text) else: # Step I: Try to get accurate error message at best error_message = self.safe_extract_error_message(response) # Step II: Construct PromptflowException if response.status_code >= 500: raise SerpAPISystemError(message=error_message) else: raise SerpAPIUserError(message=error_message) except Exception as e: # SerpApi is super robust. Set basic error handle if not isinstance(e, PromptflowException): print(f"Unexpected exception occurs: {type(e).__name__}: {str(e)}", file=sys.stderr) error_message = f"SerpAPI search request failed: {type(e).__name__}: {str(e)}" raise SerpAPISystemError(message=error_message) raise @tool def search( connection: SerpConnection, query: str, # this is required location: str = None, safe: SafeMode = SafeMode.OFF, # Not default to be SafeMode.OFF num: int = 10, engine: Engine = Engine.GOOGLE, # this is required ): return SerpAPI(connection).search( query=query, location=location, safe=safe, num=num, engine=engine, )
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/aoai_gpt4v.py
try: from openai import AzureOpenAI as AzureOpenAIClient except Exception: raise Exception( "Please upgrade your OpenAI package to version 1.0.0 or later using the command: pip install --upgrade openai.") from promptflow._internal import ToolProvider, tool from promptflow.connections import AzureOpenAIConnection from promptflow.contracts.types import PromptTemplate from promptflow.tools.common import render_jinja_template, handle_openai_error, parse_chat, \ preprocess_template_string, find_referenced_image_set, convert_to_chat_list, normalize_connection_config, \ post_process_chat_api_response class AzureOpenAI(ToolProvider): def __init__(self, connection: AzureOpenAIConnection): super().__init__() self.connection = connection self._connection_dict = normalize_connection_config(self.connection) azure_endpoint = self._connection_dict.get("azure_endpoint") api_version = self._connection_dict.get("api_version") api_key = self._connection_dict.get("api_key") self._client = AzureOpenAIClient(azure_endpoint=azure_endpoint, api_version=api_version, api_key=api_key) @tool(streaming_option_parameter="stream") @handle_openai_error() def chat( self, prompt: PromptTemplate, deployment_name: str, temperature: float = 1.0, top_p: float = 1.0, # stream is a hidden to the end user, it is only supposed to be set by the executor. stream: bool = False, stop: list = None, max_tokens: int = None, presence_penalty: float = 0, frequency_penalty: float = 0, **kwargs, ) -> str: # keep_trailing_newline=True is to keep the last \n in the prompt to avoid converting "user:\t\n" to "user:". 
prompt = preprocess_template_string(prompt) referenced_images = find_referenced_image_set(kwargs) # convert list type into ChatInputList type converted_kwargs = convert_to_chat_list(kwargs) chat_str = render_jinja_template(prompt, trim_blocks=True, keep_trailing_newline=True, **converted_kwargs) messages = parse_chat(chat_str, list(referenced_images)) headers = { "Content-Type": "application/json", "ms-azure-ai-promptflow-called-from": "aoai-gpt4v-tool" } params = { "messages": messages, "temperature": temperature, "top_p": top_p, "n": 1, "stream": stream, "presence_penalty": presence_penalty, "frequency_penalty": frequency_penalty, "extra_headers": headers, "model": deployment_name, } if stop: params["stop"] = stop if max_tokens is not None: params["max_tokens"] = max_tokens completion = self._client.chat.completions.create(**params) return post_process_chat_api_response(completion, stream, None)
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/azure_content_safety.py
from enum import Enum from typing import Dict, List, Union import json import requests from promptflow import tool, ToolProvider from promptflow.connections import AzureContentSafetyConnection from promptflow.tools.exception import AzureContentSafetyInputValueError, AzureContentSafetySystemError class TextCategorySensitivity(str, Enum): DISABLE = "disable" LOW_SENSITIVITY = "low_sensitivity" MEDIUM_SENSITIVITY = "medium_sensitivity" HIGH_SENSITIVITY = "high_sensitivity" class AzureContentSafety(ToolProvider): """ Doc reference : https://review.learn.microsoft.com/en-us/azure/cognitive-services/content-safety/quickstart?branch=pr-en-us-233724&pivots=programming-language-rest """ def __init__(self, connection: AzureContentSafetyConnection): self.connection = connection super(AzureContentSafety, self).__init__() @tool def analyze_text( self, text: str, hate_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY, sexual_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY, self_harm_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY, violence_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY, ): content_safety = ContentSafety(self.connection.endpoint, self.connection.api_key, self.connection.api_version) media_type = MediaType.Text blocklists = [] detection_result = content_safety.detect(media_type, text, blocklists) # Set the reject thresholds for each category reject_thresholds = { Category.Hate: switch_category_threshold(hate_category), Category.SelfHarm: switch_category_threshold(self_harm_category), Category.Sexual: switch_category_threshold(sexual_category), Category.Violence: switch_category_threshold(violence_category), } # Make a decision based on the detection result and reject thresholds if self.connection.api_version == "2023-10-01": decision_result = content_safety.make_decision_1001(detection_result, reject_thresholds) else: decision_result = 
content_safety.make_decision(detection_result, reject_thresholds) return convert_decision_to_json(decision_result) @tool def analyze_text( connection: AzureContentSafetyConnection, text: str, hate_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY, sexual_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY, self_harm_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY, violence_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY, ): return AzureContentSafety(connection).analyze_text( text=text, hate_category=hate_category, sexual_category=sexual_category, self_harm_category=self_harm_category, violence_category=violence_category, ) def switch_category_threshold(sensitivity: TextCategorySensitivity) -> int: switcher = { TextCategorySensitivity.DISABLE: -1, TextCategorySensitivity.LOW_SENSITIVITY: 6, TextCategorySensitivity.MEDIUM_SENSITIVITY: 4, TextCategorySensitivity.HIGH_SENSITIVITY: 2, } return switcher.get(sensitivity, f"Non-supported sensitivity: {sensitivity}") class MediaType(Enum): Text = 1 Image = 2 class Category(Enum): Hate = 1 SelfHarm = 2 Sexual = 3 Violence = 4 class Action(Enum): Accept = "Accept" Reject = "Reject" class Decision(object): def __init__(self, suggested_action: Action, action_by_category: Dict[Category, Action]) -> None: """ Represents the decision made by the content moderation system. Args: - suggested_action (Action): The suggested action to take. - action_by_category (dict[Category, Action]): The action to take for each category. 
""" self.suggested_action = suggested_action self.action_by_category = action_by_category def convert_decision_to_json(decision: Decision): result_json = {} result_json["suggested_action"] = decision.suggested_action.value category_json = {} for key, value in decision.action_by_category.items(): category_json[key.name] = value.value result_json["action_by_category"] = category_json return result_json class ContentSafety(object): def __init__(self, endpoint: str, subscription_key: str, api_version: str) -> None: """ Creates a new ContentSafety instance. Args: - endpoint (str): The endpoint URL for the Content Safety API. - subscription_key (str): The subscription key for the Content Safety API. - api_version (str): The version of the Content Safety API to use. """ self.endpoint = endpoint self.subscription_key = subscription_key self.api_version = api_version def build_url(self, media_type: MediaType) -> str: """ Builds the URL for the Content Safety API based on the media type. Args: - media_type (MediaType): The type of media to analyze. Returns: - str: The URL for the Content Safety API. """ if media_type == MediaType.Text: return f"{self.endpoint}/contentsafety/text:analyze?api-version={self.api_version}" elif media_type == MediaType.Image: return f"{self.endpoint}/contentsafety/image:analyze?api-version={self.api_version}" else: error_message = f"Invalid Media Type {media_type}" raise AzureContentSafetyInputValueError(message=error_message) def build_headers(self) -> Dict[str, str]: """ Builds the headers for the Content Safety API request. Returns: - dict[str, str]: The headers for the Content Safety API request. """ return { "Ocp-Apim-Subscription-Key": self.subscription_key, "Content-Type": "application/json", "ms-azure-ai-sender": "prompt_flow" } def build_request_body( self, media_type: MediaType, content: str, blocklists: List[str], ) -> dict: """ Builds the request body for the Content Safety API request. 
Args: - media_type (MediaType): The type of media to analyze. - content (str): The content to analyze. - blocklists (list[str]): The blocklists to use for text analysis. Returns: - dict: The request body for the Content Safety API request. """ if media_type == MediaType.Text: return { "text": content, "blocklistNames": blocklists, } elif media_type == MediaType.Image: return {"image": {"content": content}} else: error_message = f"Invalid Media Type {media_type}" raise AzureContentSafetyInputValueError(message=error_message) def detect( self, media_type: MediaType, content: str, blocklists: List[str] = [], ) -> dict: url = self.build_url(media_type) headers = self.build_headers() request_body = self.build_request_body(media_type, content, blocklists) payload = json.dumps(request_body) response = requests.post(url, headers=headers, data=payload) print("status code: " + response.status_code.__str__()) print("response txt: " + response.text) res_content = response.json() if response.status_code != 200: error_message = f"Error in detecting content: {res_content['error']['message']}" raise AzureContentSafetySystemError(message=error_message) return res_content def get_detect_result_by_category(self, category: Category, detect_result: dict) -> Union[int, None]: if category == Category.Hate: return detect_result.get("hateResult", None) elif category == Category.SelfHarm: return detect_result.get("selfHarmResult", None) elif category == Category.Sexual: return detect_result.get("sexualResult", None) elif category == Category.Violence: return detect_result.get("violenceResult", None) else: error_message = f"Invalid Category {category}" raise AzureContentSafetyInputValueError(message=error_message) def get_detect_result_by_category_1001(self, category: Category, detect_result: dict) -> Union[int, None]: category_res = detect_result.get("categoriesAnalysis", None) for res in category_res: if category.name == res.get("category", None): return res error_message = f"Invalid 
Category {category}" raise AzureContentSafetyInputValueError(message=error_message) def make_decision( self, detection_result: dict, reject_thresholds: Dict[Category, int], ) -> Decision: action_result = {} final_action = Action.Accept for category, threshold in reject_thresholds.items(): if threshold not in (-1, 0, 2, 4, 6): error_message = "RejectThreshold can only be in (-1, 0, 2, 4, 6)" raise AzureContentSafetyInputValueError(message=error_message) cate_detect_res = self.get_detect_result_by_category(category, detection_result) if cate_detect_res is None or "severity" not in cate_detect_res: error_message = f"Can not find detection result for {category}" raise AzureContentSafetySystemError(message=error_message) severity = cate_detect_res["severity"] action = Action.Reject if threshold != -1 and severity >= threshold else Action.Accept action_result[category] = action if action.value > final_action.value: final_action = action if ( "blocklistsMatchResults" in detection_result and detection_result["blocklistsMatchResults"] and len(detection_result["blocklistsMatchResults"]) > 0 ): final_action = Action.Reject print(f"Action result: {action_result}") return Decision(final_action, action_result) def make_decision_1001( self, detection_result: dict, reject_thresholds: Dict[Category, int], ) -> Decision: action_result = {} final_action = Action.Accept for category, threshold in reject_thresholds.items(): if threshold not in (-1, 0, 2, 4, 6): error_message = "RejectThreshold can only be in (-1, 0, 2, 4, 6)" raise AzureContentSafetyInputValueError(message=error_message) cate_detect_res = self.get_detect_result_by_category_1001( category, detection_result ) if cate_detect_res is None or "severity" not in cate_detect_res: error_message = f"Can not find detection result for {category}" raise AzureContentSafetySystemError(message=error_message) severity = cate_detect_res["severity"] action = ( Action.Reject if threshold != -1 and severity >= threshold else Action.Accept ) 
action_result[category] = action if action.value > final_action.value: final_action = action if ( "blocklistsMatch" in detection_result and detection_result["blocklistsMatch"] and len(detection_result["blocklistsMatch"]) > 0 ): final_action = Action.Reject print(f"Action result: {action_result}") return Decision(final_action, action_result)
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/template_rendering.py
# Avoid circular dependencies: Use import 'from promptflow._internal' instead of 'from promptflow' # since the code here is in promptflow namespace as well from promptflow._internal import tool from promptflow.tools.common import render_jinja_template @tool def render_template_jinja2(template: str, **kwargs) -> str: return render_jinja_template(template, trim_blocks=True, keep_trailing_newline=True, **kwargs)
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/list.py
from pathlib import Path from ruamel.yaml import YAML def collect_tools_from_directory(base_dir) -> dict: tools = {} yaml = YAML() for f in Path(base_dir).glob("**/*.yaml"): with open(f, "r") as f: tools_in_file = yaml.load(f) for identifier, tool in tools_in_file.items(): tools[identifier] = tool return tools def list_package_tools(): """List package tools""" yaml_dir = Path(__file__).parent / "yamls" return collect_tools_from_directory(yaml_dir)
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/common.py
import functools import json import re import sys import time from typing import List, Mapping from jinja2 import Template from openai import APIConnectionError, APIStatusError, OpenAIError, RateLimitError, APITimeoutError from promptflow.tools.exception import ChatAPIInvalidRole, WrappedOpenAIError, LLMError, JinjaTemplateError, \ ExceedMaxRetryTimes, ChatAPIInvalidFunctions, FunctionCallNotSupportedInStreamMode, \ ChatAPIFunctionRoleInvalidFormat, InvalidConnectionType from promptflow.connections import AzureOpenAIConnection, OpenAIConnection from promptflow.exceptions import SystemErrorException, UserErrorException class ChatInputList(list): """ ChatInputList is a list of ChatInput objects. It is used to override the __str__ method of list to return a string that can be easily parsed as message list. """ def __init__(self, iterable=None): super().__init__(iterable or []) def __str__(self): return "\n".join(map(str, self)) def validate_role(role: str, valid_roles: List[str] = None): if not valid_roles: valid_roles = ["assistant", "function", "user", "system"] if role not in valid_roles: valid_roles_str = ','.join([f'\'{role}:\\n\'' for role in valid_roles]) error_message = ( f"The Chat API requires a specific format for prompt definition, and the prompt should include separate " f"lines as role delimiters: {valid_roles_str}. Current parsed role '{role}'" f" does not meet the requirement. If you intend to use the Completion API, please select the appropriate" f" API type and deployment name. If you do intend to use the Chat API, please refer to the guideline at " f"https://aka.ms/pfdoc/chat-prompt or view the samples in our gallery that contain 'Chat' in the name." 
) raise ChatAPIInvalidRole(message=error_message) def validate_functions(functions): function_example = json.dumps({ "name": "function_name", "parameters": { "type": "object", "properties": { "parameter_name": { "type": "integer", "description": "parameter_description" } } }, "description": "function_description" }) common_tsg = f"Here is a valid function example: {function_example}. See more details at " \ "https://platform.openai.com/docs/api-reference/chat/create#chat/create-functions " \ "or view sample 'How to use functions with chat models' in our gallery." if len(functions) == 0: raise ChatAPIInvalidFunctions(message=f"functions cannot be an empty list. {common_tsg}") else: for i, function in enumerate(functions): # validate if the function is a dict if not isinstance(function, dict): raise ChatAPIInvalidFunctions(message=f"function {i} '{function}' is not a dict. {common_tsg}") # validate if has required keys for key in ["name", "parameters"]: if key not in function.keys(): raise ChatAPIInvalidFunctions( message=f"function {i} '{function}' does not have '{key}' property. {common_tsg}") # validate if the parameters is a dict if not isinstance(function["parameters"], dict): raise ChatAPIInvalidFunctions( message=f"function {i} '{function['name']}' parameters '{function['parameters']}' " f"should be described as a JSON Schema object. {common_tsg}") # validate if the parameters has required keys for key in ["type", "properties"]: if key not in function["parameters"].keys(): raise ChatAPIInvalidFunctions( message=f"function {i} '{function['name']}' parameters '{function['parameters']}' " f"does not have '{key}' property. {common_tsg}") # validate if the parameters type is object if function["parameters"]["type"] != "object": raise ChatAPIInvalidFunctions( message=f"function {i} '{function['name']}' parameters 'type' " f"should be 'object'. 
{common_tsg}") # validate if the parameters properties is a dict if not isinstance(function["parameters"]["properties"], dict): raise ChatAPIInvalidFunctions( message=f"function {i} '{function['name']}' parameters 'properties' " f"should be described as a JSON Schema object. {common_tsg}") def try_parse_name_and_content(role_prompt): # customer can add ## in front of name/content for markdown highlight. # and we still support name/content without ## prefix for backward compatibility. pattern = r"\n*#{0,2}\s*name:\n+\s*(\S+)\s*\n*#{0,2}\s*content:\n?(.*)" match = re.search(pattern, role_prompt, re.DOTALL) if match: return match.group(1), match.group(2) return None def parse_chat(chat_str, images: List = None, valid_roles: List[str] = None): if not valid_roles: valid_roles = ["system", "user", "assistant", "function"] # openai chat api only supports below roles. # customer can add single # in front of role name for markdown highlight. # and we still support role name without # prefix for backward compatibility. separator = r"(?i)^\s*#?\s*(" + "|".join(valid_roles) + r")\s*:\s*\n" images = images or [] hash2images = {str(x): x for x in images} chunks = re.split(separator, chat_str, flags=re.MULTILINE) chat_list = [] for chunk in chunks: last_message = chat_list[-1] if len(chat_list) > 0 else None if last_message and "role" in last_message and "content" not in last_message: parsed_result = try_parse_name_and_content(chunk) if parsed_result is None: # "name" is required if the role is "function" if last_message["role"] == "function": raise ChatAPIFunctionRoleInvalidFormat( message="Failed to parse function role prompt. Please make sure the prompt follows the " "format: 'name:\\nfunction_name\\ncontent:\\nfunction_content'. " "'name' is required if role is function, and it should be the name of the function " "whose response is in the content. May contain a-z, A-Z, 0-9, and underscores, " "with a maximum length of 64 characters. 
See more details in " "https://platform.openai.com/docs/api-reference/chat/create#chat/create-name " "or view sample 'How to use functions with chat models' in our gallery.") # "name" is optional for other role types. else: last_message["content"] = to_content_str_or_list(chunk, hash2images) else: last_message["name"] = parsed_result[0] last_message["content"] = to_content_str_or_list(parsed_result[1], hash2images) else: if chunk.strip() == "": continue # Check if prompt follows chat api message format and has valid role. # References: https://platform.openai.com/docs/api-reference/chat/create. role = chunk.strip().lower() validate_role(role, valid_roles=valid_roles) new_message = {"role": role} chat_list.append(new_message) return chat_list def to_content_str_or_list(chat_str: str, hash2images: Mapping): chat_str = chat_str.strip() chunks = chat_str.split("\n") include_image = False result = [] for chunk in chunks: if chunk.strip() in hash2images: image_message = {} image_message["type"] = "image_url" image_url = hash2images[chunk.strip()].source_url \ if hasattr(hash2images[chunk.strip()], "source_url") else None if not image_url: image_bs64 = hash2images[chunk.strip()].to_base64() image_mine_type = hash2images[chunk.strip()]._mime_type image_url = {"url": f"data:{image_mine_type};base64,{image_bs64}"} image_message["image_url"] = image_url result.append(image_message) include_image = True elif chunk.strip() == "": continue else: result.append({"type": "text", "text": chunk}) return result if include_image else chat_str def handle_openai_error(tries: int = 10, delay: float = 8.0): """ A decorator function that used to handle OpenAI error. OpenAI Error falls into retriable vs non-retriable ones. 
For retriable error, the decorator use below parameters to control its retry activity with exponential backoff: `tries` : max times for the function invocation, type is int 'delay': base delay seconds for exponential delay, type is float """ def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs): for i in range(tries + 1): try: return func(*args, **kwargs) except (SystemErrorException, UserErrorException) as e: # Throw inner wrapped exception directly raise e except (APIStatusError, APIConnectionError) as e: # Handle retriable exception, please refer to # https://platform.openai.com/docs/guides/error-codes/api-errors print(f"Exception occurs: {type(e).__name__}: {str(e)}", file=sys.stderr) if isinstance(e, APIConnectionError) and not isinstance(e, APITimeoutError) \ and "connection aborted" not in str(e).lower(): raise WrappedOpenAIError(e) # Retry InternalServerError(>=500), RateLimitError(429), UnprocessableEntityError(422) if isinstance(e, APIStatusError): status_code = e.response.status_code if status_code < 500 and status_code not in [429, 422]: raise WrappedOpenAIError(e) if isinstance(e, RateLimitError) and getattr(e, "type", None) == "insufficient_quota": # Exit retry if this is quota insufficient error print(f"{type(e).__name__} with insufficient quota. Throw user error.", file=sys.stderr) raise WrappedOpenAIError(e) if i == tries: # Exit retry if max retry reached print(f"{type(e).__name__} reached max retry. Exit retry with user error.", file=sys.stderr) raise ExceedMaxRetryTimes(e) if hasattr(e, 'response') and e.response is not None: retry_after_in_header = e.response.headers.get("retry-after", None) else: retry_after_in_header = None if not retry_after_in_header: retry_after_seconds = delay * (2 ** i) msg = ( f"{type(e).__name__} #{i}, but no Retry-After header, " + f"Back off {retry_after_seconds} seconds for retry." 
) print(msg, file=sys.stderr) else: retry_after_seconds = float(retry_after_in_header) * (2 ** i) msg = ( f"{type(e).__name__} #{i}, Retry-After={retry_after_in_header}, " f"Back off {retry_after_seconds} seconds for retry." ) print(msg, file=sys.stderr) time.sleep(retry_after_seconds) except OpenAIError as e: # For other non-retriable errors from OpenAIError, # For example, AuthenticationError, APIConnectionError, BadRequestError, NotFoundError # Mark UserError for all the non-retriable OpenAIError print(f"Exception occurs: {type(e).__name__}: {str(e)}", file=sys.stderr) raise WrappedOpenAIError(e) except Exception as e: print(f"Exception occurs: {type(e).__name__}: {str(e)}", file=sys.stderr) error_message = f"OpenAI API hits exception: {type(e).__name__}: {str(e)}" raise LLMError(message=error_message) return wrapper return decorator def to_bool(value) -> bool: return str(value).lower() == "true" def render_jinja_template(prompt, trim_blocks=True, keep_trailing_newline=True, **kwargs): try: return Template(prompt, trim_blocks=trim_blocks, keep_trailing_newline=keep_trailing_newline).render(**kwargs) except Exception as e: # For exceptions raised by jinja2 module, mark UserError print(f"Exception occurs: {type(e).__name__}: {str(e)}", file=sys.stderr) error_message = f"Failed to render jinja template: {type(e).__name__}: {str(e)}. " \ + "Please modify your prompt to fix the issue." raise JinjaTemplateError(message=error_message) from e def process_function_call(function_call): if function_call is None: param = "auto" elif function_call == "auto" or function_call == "none": param = function_call else: function_call_example = json.dumps({"name": "function_name"}) common_tsg = f"Here is a valid example: {function_call_example}. See the guide at " \ "https://platform.openai.com/docs/api-reference/chat/create#chat/create-function_call " \ "or view sample 'How to call functions with chat models' in our gallery." 
param = function_call if not isinstance(param, dict): raise ChatAPIInvalidFunctions( message=f"function_call parameter '{param}' must be a dict, but not {type(function_call)}. {common_tsg}" ) else: if "name" not in function_call: raise ChatAPIInvalidFunctions( message=f'function_call parameter {json.dumps(param)} must contain "name" field. {common_tsg}' ) return param def post_process_chat_api_response(completion, stream, functions): if stream: if functions is not None: error_message = "Function calling has not been supported by stream mode yet." raise FunctionCallNotSupportedInStreamMode(message=error_message) def generator(): for chunk in completion: if chunk.choices: yield chunk.choices[0].delta.content if hasattr(chunk.choices[0].delta, 'content') and \ chunk.choices[0].delta.content is not None else "" # We must return the generator object, not using yield directly here. # Otherwise, the function itself will become a generator, despite whether stream is True or False. return generator() else: # When calling function, function_call response will be returned as a field in message, so we need return # message directly. Otherwise, we only return content. if functions is not None: return completion.model_dump()["choices"][0]["message"] else: # chat api may return message with no content. 
return getattr(completion.choices[0].message, "content", "") def preprocess_template_string(template_string: str) -> str: """Remove the image input decorator from the template string and place the image input in a new line.""" pattern = re.compile(r'\!\[(\s*image\s*)\]\(\{\{(\s*[^\s{}]+\s*)\}\}\)') # Find all matches in the input string matches = pattern.findall(template_string) # Perform substitutions for match in matches: original = f"![{match[0]}]({{{{{match[1]}}}}})" replacement = f"\n{{{{{match[1]}}}}}\n" template_string = template_string.replace(original, replacement) return template_string def convert_to_chat_list(obj): if isinstance(obj, dict): return {key: convert_to_chat_list(value) for key, value in obj.items()} elif isinstance(obj, list): return ChatInputList([convert_to_chat_list(item) for item in obj]) else: return obj def add_referenced_images_to_set(value, image_set, image_type): if isinstance(value, image_type): image_set.add(value) elif isinstance(value, list): for item in value: add_referenced_images_to_set(item, image_set, image_type) elif isinstance(value, dict): for _, item in value.items(): add_referenced_images_to_set(item, image_set, image_type) def find_referenced_image_set(kwargs: dict): referenced_images = set() try: from promptflow.contracts.multimedia import Image for _, value in kwargs.items(): add_referenced_images_to_set(value, referenced_images, Image) except ImportError: pass return referenced_images def normalize_connection_config(connection): """ Normalizes the configuration of a given connection object for compatibility. This function takes a connection object and normalizes its configuration, ensuring it is compatible and standardized for use. 
""" if isinstance(connection, AzureOpenAIConnection): return { "api_key": connection.api_key, "api_version": connection.api_version, "azure_endpoint": connection.api_base } elif isinstance(connection, OpenAIConnection): return { "api_key": connection.api_key, "organization": connection.organization, "base_url": connection.base_url } else: error_message = f"Not Support connection type '{type(connection).__name__}'. " \ f"Connection type should be in [AzureOpenAIConnection, OpenAIConnection]." raise InvalidConnectionType(message=error_message)
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/embedding.py
from enum import Enum
from typing import Union

from openai import AzureOpenAI as AzureOpenAIClient, OpenAI as OpenAIClient

from promptflow.tools.common import handle_openai_error, normalize_connection_config
from promptflow.tools.exception import InvalidConnectionType
# Avoid circular dependencies: Use import 'from promptflow._internal' instead of 'from promptflow'
# since the code here is in promptflow namespace as well
from promptflow._internal import tool
from promptflow.connections import AzureOpenAIConnection, OpenAIConnection


class EmbeddingModel(str, Enum):
    TEXT_EMBEDDING_ADA_002 = "text-embedding-ada-002"
    TEXT_SEARCH_ADA_DOC_001 = "text-search-ada-doc-001"
    TEXT_SEARCH_ADA_QUERY_001 = "text-search-ada-query-001"


@tool
@handle_openai_error()
def embedding(connection: Union[AzureOpenAIConnection, OpenAIConnection], input: str,
              deployment_name: str = "", model: EmbeddingModel = EmbeddingModel.TEXT_EMBEDDING_ADA_002):
    """Compute an embedding vector for *input*.

    Azure OpenAI connections route via *deployment_name*; plain OpenAI connections
    route via *model*. Any other connection type raises InvalidConnectionType.
    """
    if isinstance(connection, AzureOpenAIConnection):
        aoai_client = AzureOpenAIClient(**normalize_connection_config(connection))
        response = aoai_client.embeddings.create(
            input=input,
            model=deployment_name,
            extra_headers={"ms-azure-ai-promptflow-called-from": "aoai-tool"},
        )
        return response.data[0].embedding

    if isinstance(connection, OpenAIConnection):
        oai_client = OpenAIClient(**normalize_connection_config(connection))
        response = oai_client.embeddings.create(
            input=input,
            model=model,
        )
        return response.data[0].embedding

    error_message = f"Not Support connection type '{type(connection).__name__}' for embedding api. " \
                    f"Connection type should be in [AzureOpenAIConnection, OpenAIConnection]."
    raise InvalidConnectionType(message=error_message)
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/openai.py
from enum import Enum

try:
    from openai import OpenAI as OpenAIClient
except Exception:
    raise Exception(
        "Please upgrade your OpenAI package to version 1.0.0 or later using the command: pip install --upgrade openai.")

from promptflow.tools.common import render_jinja_template, handle_openai_error, \
    parse_chat, to_bool, validate_functions, process_function_call, \
    post_process_chat_api_response, normalize_connection_config

# Avoid circular dependencies: Use import 'from promptflow._internal' instead of 'from promptflow'
# since the code here is in promptflow namespace as well
from promptflow._internal import ToolProvider, tool, register_apis
from promptflow.connections import OpenAIConnection
from promptflow.contracts.types import PromptTemplate


class Engine(str, Enum):
    # Legacy OpenAI completions model names.
    TEXT_DAVINCI_001 = "text-davinci-001"
    TEXT_DAVINCI_002 = "text-davinci-002"
    TEXT_DAVINCI_003 = "text-davinci-003"
    TEXT_CURIE_001 = "text-curie-001"
    TEXT_BABBAGE_001 = "text-babbage-001"
    TEXT_ADA_001 = "text-ada-001"
    CODE_CUSHMAN_001 = "code-cushman-001"
    CODE_DAVINCI_002 = "code-davinci-002"


class OpenAI(ToolProvider):
    """Tool provider wrapping the OpenAI v1 client for completion and chat APIs."""

    def __init__(self, connection: OpenAIConnection):
        super().__init__()
        # Connection config is normalized into OpenAI client kwargs (api_key, organization, base_url).
        self._connection_dict = normalize_connection_config(connection)
        self._client = OpenAIClient(**self._connection_dict)

    @tool
    @handle_openai_error()
    def completion(
        self,
        prompt: PromptTemplate,
        model: Engine = Engine.TEXT_DAVINCI_003,
        suffix: str = None,
        max_tokens: int = 16,
        temperature: float = 1.0,
        top_p: float = 1.0,
        n: int = 1,
        # stream is a hidden to the end user, it is only supposed to be set by the executor.
        stream: bool = False,
        logprobs: int = None,
        echo: bool = False,
        stop: list = None,
        presence_penalty: float = 0,
        frequency_penalty: float = 0,
        best_of: int = 1,
        logit_bias: dict = {},  # NOTE(review): mutable default; harmless here (never mutated) but an anti-pattern.
        user: str = "",
        **kwargs,
    ) -> str:
        """Render *prompt* as a Jinja template using **kwargs, then call the legacy completions API.

        Returns the completion text, or a generator of text chunks when ``stream`` is true.
        """
        prompt = render_jinja_template(prompt, trim_blocks=True, keep_trailing_newline=True, **kwargs)
        # TODO: remove below type conversion after client can pass json rather than string.
        echo = to_bool(echo)
        stream = to_bool(stream)
        response = self._client.completions.create(
            prompt=prompt,
            model=model.value if isinstance(model, Enum) else model,
            # empty string suffix should be treated as None.
            suffix=suffix if suffix else None,
            max_tokens=int(max_tokens),
            temperature=float(temperature),
            top_p=float(top_p),
            n=int(n),
            stream=stream,
            # NOTE(review): `if logprobs` treats 0 as falsy and sends None; confirm 0 is never a meaningful value here.
            logprobs=int(logprobs) if logprobs else None,
            echo=echo,
            stop=stop if stop else None,
            presence_penalty=float(presence_penalty),
            frequency_penalty=float(frequency_penalty),
            best_of=int(best_of),
            # Logit bias must be a dict if we passed it to openai api.
            logit_bias=logit_bias if logit_bias else {},
            user=user
        )
        if stream:
            def generator():
                for chunk in response:
                    if chunk.choices:
                        yield getattr(chunk.choices[0], "text", "")

            # We must return the generator object, not using yield directly here.
            # Otherwise, the function itself will become a generator, despite whether stream is True or False.
            return generator()
        else:
            # get first element because prompt is single.
            return response.choices[0].text

    @tool
    @handle_openai_error()
    def chat(
        self,
        prompt: PromptTemplate,
        model: str = "gpt-3.5-turbo",
        temperature: float = 1.0,
        top_p: float = 1.0,
        n: int = 1,
        # stream is a hidden to the end user, it is only supposed to be set by the executor.
        stream: bool = False,
        stop: list = None,
        max_tokens: int = None,
        presence_penalty: float = 0,
        frequency_penalty: float = 0,
        logit_bias: dict = {},  # NOTE(review): mutable default; never mutated, but prefer None sentinel.
        user: str = "",
        # function_call can be of type str or dict.
        function_call: object = None,
        functions: list = None,
        response_format: object = None,
        **kwargs
    ) -> [str, dict]:
        """Render *prompt*, parse it into chat messages, and call the chat completions API.

        When ``functions`` is supplied it is validated and forwarded together with
        ``function_call``. The response is post-processed (streaming generator,
        function-call dict, or plain content string).
        """
        chat_str = render_jinja_template(prompt, trim_blocks=True, keep_trailing_newline=True, **kwargs)
        messages = parse_chat(chat_str)
        # TODO: remove below type conversion after client can pass json rather than string.
        stream = to_bool(stream)
        params = {
            "model": model,
            "messages": messages,
            "temperature": float(temperature),
            "top_p": float(top_p),
            "n": int(n),
            "stream": stream,
            "stop": stop if stop else None,
            # "inf" (string) is accepted as "no limit" and mapped to None.
            "max_tokens": int(max_tokens) if max_tokens is not None and str(max_tokens).lower() != "inf" else None,
            "presence_penalty": float(presence_penalty),
            "frequency_penalty": float(frequency_penalty),
            "logit_bias": logit_bias,
            "user": user,
            "response_format": response_format
        }

        if functions is not None:
            validate_functions(functions)
            params["functions"] = functions
            params["function_call"] = process_function_call(function_call)

        completion = self._client.chat.completions.create(**params)
        return post_process_chat_api_response(completion, stream, functions)


register_apis(OpenAI)


@tool
def completion(
    connection: OpenAIConnection,
    prompt: PromptTemplate,
    model: Engine = Engine.TEXT_DAVINCI_003,
    suffix: str = None,
    max_tokens: int = 16,
    temperature: float = 1.0,
    top_p: float = 1,
    n: int = 1,
    stream: bool = False,
    logprobs: int = None,
    echo: bool = False,
    stop: list = None,
    presence_penalty: float = 0,
    frequency_penalty: float = 0,
    best_of: int = 1,
    logit_bias: dict = {},
    user: str = "",
    **kwargs
) -> [str, dict]:
    # Module-level convenience wrapper: constructs the provider per call and delegates.
    # NOTE(review): annotated -> [str, dict] while the method is -> str; consider aligning.
    return OpenAI(connection).completion(
        prompt=prompt,
        model=model,
        suffix=suffix,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        n=n,
        stream=stream,
        logprobs=logprobs,
        echo=echo,
        stop=stop if stop else None,
        presence_penalty=presence_penalty,
        frequency_penalty=frequency_penalty,
        best_of=best_of,
        logit_bias=logit_bias,
        user=user,
        **kwargs,
    )


@tool
def chat(
    connection: OpenAIConnection,
    prompt: PromptTemplate,
    model: str = "gpt-3.5-turbo",
    temperature: float = 1,
    top_p: float = 1,
    n: int = 1,
    stream: bool = False,
    stop: list = None,
    max_tokens: int = None,
    presence_penalty: float = 0,
    frequency_penalty: float = 0,
    logit_bias: dict = {},
    user: str = "",
    function_call: object = None,
    functions: list = None,
    response_format: object = None,
    **kwargs
) -> [str, dict]:
    # Module-level convenience wrapper: constructs the provider per call and delegates.
    return OpenAI(connection).chat(
        prompt=prompt,
        model=model,
        temperature=temperature,
        top_p=top_p,
        n=n,
        stream=stream,
        stop=stop if stop else None,
        max_tokens=max_tokens,
        presence_penalty=presence_penalty,
        frequency_penalty=frequency_penalty,
        logit_bias=logit_bias,
        user=user,
        function_call=function_call,
        functions=functions,
        response_format=response_format,
        **kwargs,
    )
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/aoai.py
import json

try:
    from openai import AzureOpenAI as AzureOpenAIClient
except Exception:
    raise Exception(
        "Please upgrade your OpenAI package to version 1.0.0 or later using the command: pip install --upgrade openai.")

from promptflow.tools.common import render_jinja_template, handle_openai_error, parse_chat, to_bool, \
    validate_functions, process_function_call, post_process_chat_api_response, normalize_connection_config

# Avoid circular dependencies: Use import 'from promptflow._internal' instead of 'from promptflow'
# since the code here is in promptflow namespace as well
from promptflow._internal import enable_cache, ToolProvider, tool, register_apis
from promptflow.connections import AzureOpenAIConnection
from promptflow.contracts.types import PromptTemplate


class AzureOpenAI(ToolProvider):
    """Tool provider wrapping the Azure OpenAI v1 client for completion and chat APIs."""

    def __init__(self, connection: AzureOpenAIConnection):
        super().__init__()
        self.connection = connection
        # Connection config is normalized into client kwargs (api_key, api_version, azure_endpoint).
        self._connection_dict = normalize_connection_config(self.connection)
        self._client = AzureOpenAIClient(**self._connection_dict)

    def calculate_cache_string_for_completion(
        self,
        **kwargs,
    ) -> str:
        """Build the cache key for @enable_cache: connection fields (minus api_key) plus call kwargs."""
        d = dict(self.connection)
        d.pop("api_key")
        d.update({**kwargs})
        return json.dumps(d)

    @tool
    @handle_openai_error()
    @enable_cache(calculate_cache_string_for_completion)
    def completion(
        self,
        prompt: PromptTemplate,
        # for AOAI, deployment name is customized by user, not model name.
        deployment_name: str,
        suffix: str = None,
        max_tokens: int = 16,
        temperature: float = 1.0,
        top_p: float = 1.0,
        n: int = 1,
        # stream is a hidden to the end user, it is only supposed to be set by the executor.
        stream: bool = False,
        logprobs: int = None,
        echo: bool = False,
        stop: list = None,
        presence_penalty: float = 0,
        frequency_penalty: float = 0,
        best_of: int = 1,
        logit_bias: dict = {},  # NOTE(review): mutable default; harmless here (never mutated) but an anti-pattern.
        user: str = "",
        **kwargs,
    ) -> str:
        """Render *prompt* as a Jinja template, then call the legacy completions API on *deployment_name*.

        Returns the completion text, or a generator of text chunks when ``stream`` is true.
        Results are cached via @enable_cache keyed on connection + arguments.
        """
        prompt = render_jinja_template(prompt, trim_blocks=True, keep_trailing_newline=True, **kwargs)
        # TODO: remove below type conversion after client can pass json rather than string.
        echo = to_bool(echo)
        stream = to_bool(stream)
        response = self._client.completions.create(
            prompt=prompt,
            model=deployment_name,
            # empty string suffix should be treated as None.
            suffix=suffix if suffix else None,
            max_tokens=int(max_tokens),
            temperature=float(temperature),
            top_p=float(top_p),
            n=int(n),
            stream=stream,
            # TODO: remove below type conversion after client pass json rather than string.
            # empty string will go to else branch, but original api cannot accept empty
            # string, must be None.
            logprobs=int(logprobs) if logprobs else None,
            echo=echo,
            # fix bug "[] is not valid under any of the given schemas-'stop'"
            stop=stop if stop else None,
            presence_penalty=float(presence_penalty),
            frequency_penalty=float(frequency_penalty),
            best_of=int(best_of),
            # Logit bias must be a dict if we passed it to openai api.
            logit_bias=logit_bias if logit_bias else {},
            user=user,
            extra_headers={"ms-azure-ai-promptflow-called-from": "aoai-tool"})

        if stream:
            def generator():
                for chunk in response:
                    if chunk.choices:
                        yield chunk.choices[0].text if hasattr(chunk.choices[0], 'text') and \
                            chunk.choices[0].text is not None else ""

            # We must return the generator object, not using yield directly here.
            # Otherwise, the function itself will become a generator, despite whether stream is True or False.
            return generator()
        else:
            # get first element because prompt is single.
            return response.choices[0].text

    @tool
    @handle_openai_error()
    def chat(
        self,
        prompt: PromptTemplate,
        # for AOAI, deployment name is customized by user, not model name.
        deployment_name: str,
        temperature: float = 1.0,
        top_p: float = 1.0,
        n: int = 1,
        # stream is a hidden to the end user, it is only supposed to be set by the executor.
        stream: bool = False,
        stop: list = None,
        max_tokens: int = None,
        presence_penalty: float = 0,
        frequency_penalty: float = 0,
        logit_bias: dict = {},  # NOTE(review): mutable default; never mutated, but prefer None sentinel.
        user: str = "",
        # function_call can be of type str or dict.
        function_call: object = None,
        functions: list = None,
        response_format: object = None,
        **kwargs,
    ) -> [str, dict]:
        """Render *prompt*, parse into chat messages, and call the chat completions API on *deployment_name*.

        Optional ``functions``/``function_call`` are validated and forwarded; the response is
        post-processed (streaming generator, function-call dict, or plain content string).
        """
        # keep_trailing_newline=True is to keep the last \n in the prompt to avoid converting "user:\t\n" to "user:".
        chat_str = render_jinja_template(prompt, trim_blocks=True, keep_trailing_newline=True, **kwargs)
        messages = parse_chat(chat_str)
        # TODO: remove below type conversion after client can pass json rather than string.
        stream = to_bool(stream)
        params = {
            "model": deployment_name,
            "messages": messages,
            "temperature": float(temperature),
            "top_p": float(top_p),
            "n": int(n),
            "stream": stream,
            "stop": stop if stop else None,
            # "inf" (string) is accepted as "no limit" and mapped to None.
            "max_tokens": int(max_tokens) if max_tokens is not None and str(max_tokens).lower() != "inf" else None,
            "presence_penalty": float(presence_penalty),
            "frequency_penalty": float(frequency_penalty),
            "logit_bias": logit_bias,
            "user": user,
            "response_format": response_format,
            "extra_headers": {"ms-azure-ai-promptflow-called-from": "aoai-tool"}
        }

        if functions is not None:
            validate_functions(functions)
            params["functions"] = functions
            params["function_call"] = process_function_call(function_call)

        completion = self._client.chat.completions.create(**params)
        return post_process_chat_api_response(completion, stream, functions)


register_apis(AzureOpenAI)


@tool
def completion(
    connection: AzureOpenAIConnection,
    prompt: PromptTemplate,
    deployment_name: str,
    suffix: str = None,
    max_tokens: int = 16,
    temperature: float = 1.0,
    top_p: float = 1,
    n: int = 1,
    stream: bool = False,
    logprobs: int = None,
    echo: bool = False,
    stop: list = None,
    presence_penalty: float = 0,
    frequency_penalty: float = 0,
    best_of: int = 1,
    logit_bias: dict = {},
    user: str = "",
    **kwargs,
) -> str:
    # Module-level convenience wrapper: constructs the provider per call and delegates.
    return AzureOpenAI(connection).completion(
        prompt=prompt,
        deployment_name=deployment_name,
        suffix=suffix,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        n=n,
        stream=stream,
        logprobs=logprobs,
        echo=echo,
        stop=stop if stop else None,
        presence_penalty=presence_penalty,
        frequency_penalty=frequency_penalty,
        best_of=best_of,
        logit_bias=logit_bias,
        user=user,
        **kwargs,
    )


@tool
def chat(
    connection: AzureOpenAIConnection,
    prompt: PromptTemplate,
    deployment_name: str,
    temperature: float = 1,
    top_p: float = 1,
    n: int = 1,
    stream: bool = False,
    stop: list = None,
    max_tokens: int = None,
    presence_penalty: float = 0,
    frequency_penalty: float = 0,
    logit_bias: dict = {},
    user: str = "",
    function_call: object = None,
    functions: list = None,
    response_format: object = None,
    **kwargs,
) -> str:
    # chat model is not available in azure openai, so need to set the environment variable.
    return AzureOpenAI(connection).chat(
        prompt=prompt,
        deployment_name=deployment_name,
        temperature=temperature,
        top_p=top_p,
        n=n,
        stream=stream,
        stop=stop if stop else None,
        max_tokens=max_tokens,
        presence_penalty=presence_penalty,
        frequency_penalty=frequency_penalty,
        logit_bias=logit_bias,
        user=user,
        function_call=function_call,
        functions=functions,
        response_format=response_format,
        **kwargs,
    )
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/openai_gpt4v.py
try:
    from openai import OpenAI as OpenAIClient
except Exception:
    raise Exception(
        "Please upgrade your OpenAI package to version 1.0.0 or later using the command: pip install --upgrade openai.")

from promptflow.connections import OpenAIConnection
from promptflow.contracts.types import PromptTemplate
from promptflow._internal import ToolProvider, tool
from promptflow.tools.common import render_jinja_template, handle_openai_error, \
    parse_chat, post_process_chat_api_response, preprocess_template_string, \
    find_referenced_image_set, convert_to_chat_list, normalize_connection_config


class OpenAI(ToolProvider):
    """Tool provider for OpenAI vision-enabled chat (gpt-4-vision-preview)."""

    def __init__(self, connection: OpenAIConnection):
        super().__init__()
        self._connection_dict = normalize_connection_config(connection)
        self._client = OpenAIClient(**self._connection_dict)

    @tool(streaming_option_parameter="stream")
    @handle_openai_error()
    def chat(
        self,
        prompt: PromptTemplate,
        model: str = "gpt-4-vision-preview",
        temperature: float = 1.0,
        top_p: float = 1.0,
        # stream is a hidden to the end user, it is only supposed to be set by the executor.
        stream: bool = False,
        stop: list = None,
        max_tokens: int = None,
        presence_penalty: float = 0,
        frequency_penalty: float = 0,
        **kwargs,
    ) -> [str, dict]:
        """Render a vision prompt (text + image placeholders) and call the chat completions API."""
        # Rewrite ![image]({{name}}) decorators so each image placeholder sits on its own line.
        normalized_prompt = preprocess_template_string(prompt)
        images_in_use = find_referenced_image_set(kwargs)

        # Wrap plain lists as ChatInputList so they render correctly inside the chat template.
        template_inputs = convert_to_chat_list(kwargs)
        # keep_trailing_newline=True keeps the final \n so "user:\t\n" is not collapsed to "user:".
        rendered = render_jinja_template(
            normalized_prompt, trim_blocks=True, keep_trailing_newline=True, **template_inputs)
        messages = parse_chat(rendered, list(images_in_use))

        request_args = {
            "model": model,
            "messages": messages,
            "temperature": temperature,
            "top_p": top_p,
            "n": 1,
            "stream": stream,
            "presence_penalty": presence_penalty,
            "frequency_penalty": frequency_penalty,
        }
        # Optional arguments are forwarded only when the caller actually set them.
        if stop:
            request_args["stop"] = stop
        if max_tokens is not None:
            request_args["max_tokens"] = max_tokens

        response = self._client.chat.completions.create(**request_args)
        return post_process_chat_api_response(response, stream, None)
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/open_model_llm.py
import functools
import json
import os
import re
import requests
import sys
import time
import tempfile

from abc import abstractmethod
from datetime import datetime, timedelta
from enum import Enum
from typing import Any, Dict, List, Tuple, Optional, Union

from promptflow._core.tool import ToolProvider, tool
from promptflow._sdk._constants import ConnectionType
from promptflow.connections import CustomConnection
from promptflow.contracts.types import PromptTemplate
from promptflow.tools.common import render_jinja_template, validate_role
from promptflow.tools.exception import (
    OpenModelLLMOnlineEndpointError,
    OpenModelLLMUserError,
    OpenModelLLMKeyValidationError,
    ChatAPIInvalidRole
)

DEPLOYMENT_DEFAULT = "default"
CONNECTION_CACHE_FILE = "pf_connection_names"
VALID_LLAMA_ROLES = {"system", "user", "assistant"}
AUTH_REQUIRED_CONNECTION_TYPES = {"serverlessendpoint", "onlineendpoint", "connection"}
REQUIRED_CONFIG_KEYS = ["endpoint_url", "model_family"]
REQUIRED_SECRET_KEYS = ["endpoint_api_key"]
ENDPOINT_REQUIRED_ENV_VARS = ["AZUREML_ARM_SUBSCRIPTION", "AZUREML_ARM_RESOURCEGROUP", "AZUREML_ARM_WORKSPACE_NAME"]


def handle_online_endpoint_error(max_retries: int = 5,
                                 initial_delay: float = 2,
                                 exponential_base: float = 3):
    """Decorator factory: retry OpenModelLLMOnlineEndpointError with exponential backoff.

    After *max_retries* attempts the last error is logged to stderr and re-raised.
    """
    def deco_retry(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            delay = initial_delay
            for i in range(max_retries):
                try:
                    return func(*args, **kwargs)
                except OpenModelLLMOnlineEndpointError as e:
                    if i == max_retries - 1:
                        error_message = f"Exception hit calling Online Endpoint: {type(e).__name__}: {str(e)}"
                        print(error_message, file=sys.stderr)
                        raise OpenModelLLMOnlineEndpointError(message=error_message)
                    delay *= exponential_base
                    time.sleep(delay)
        return wrapper
    return deco_retry


class ConnectionCache:
    """Serializable cache of the connection names listed for a workspace, with an expiry timestamp."""

    def __init__(self,
                 use_until: datetime,
                 subscription_id: str,
                 resource_group: str,
                 workspace_name: str,
                 connection_names: List[str]):
        self.use_until = use_until
        self.subscription_id = subscription_id
        self.resource_group = resource_group
        self.workspace_name = workspace_name
        self.connection_names = connection_names

    # NOTE(review): classmethod's first parameter is conventionally named `cls`, not `self`.
    @classmethod
    def from_filename(self, file):
        """Load a ConnectionCache previously serialized to *file* as JSON."""
        cache = json.load(file)
        return self(cache['use_until'],
                    cache['subscription_id'],
                    cache['resource_group'],
                    cache['workspace_name'],
                    cache['connection_names'])

    def can_use(self,
                subscription_id: str,
                resource_group: str,
                workspace_name: str):
        """True when the cache is unexpired and targets the same workspace triple."""
        use_until_time = datetime.fromisoformat(self.use_until)
        return (use_until_time > datetime.now()
                and self.subscription_id == subscription_id
                and self.resource_group == resource_group
                and self.workspace_name == workspace_name)


class Endpoint:
    """An online endpoint plus its deployments; the first deployment found is the default."""

    def __init__(self,
                 endpoint_name: str,
                 endpoint_url: str,
                 endpoint_api_key: str):
        self.deployments: List[Deployment] = []
        self.default_deployment: Deployment = None
        self.endpoint_url = endpoint_url
        self.endpoint_api_key = endpoint_api_key
        self.endpoint_name = endpoint_name


class Deployment:
    """A named deployment and the model family it serves."""

    def __init__(self,
                 deployment_name: str,
                 model_family: str):
        self.model_family = model_family
        self.deployment_name = deployment_name


class ServerlessEndpointsContainer:
    """Lists and resolves Azure ML serverless endpoints via the ARM REST API."""

    API_VERSION = "2023-08-01-preview"

    def _get_headers(self, token: str) -> Dict[str, str]:
        headers = {
            "Authorization": f"Bearer {token}",
            "Content-Type": "application/json",
        }
        return headers

    def get_serverless_arm_url(self, subscription_id, resource_group, workspace_name, suffix=None):
        """Build the ARM URL for the workspace's serverlessEndpoints resource (optionally /<suffix>)."""
        suffix = "" if suffix is None else f"/{suffix}"
        return f"https://management.azure.com/subscriptions/{subscription_id}" \
            + f"/resourceGroups/{resource_group}/providers/Microsoft.MachineLearningServices" \
            + f"/workspaces/{workspace_name}/serverlessEndpoints{suffix}?api-version={self.API_VERSION}"

    def _list(self, token: str, subscription_id: str, resource_group: str, workspace_name: str):
        """GET the list of serverless endpoints; returns [] on any failure (best-effort)."""
        headers = self._get_headers(token)
        url = self.get_serverless_arm_url(subscription_id, resource_group, workspace_name)
        try:
            response = requests.get(url, headers=headers, timeout=50)
            return json.loads(response.content)['value']
        except Exception as e:
            print(f"Error encountered when listing serverless endpoints. Exception: {e}", file=sys.stderr)
            return []

    def _validate_model_family(self, serverless_endpoint):
        """Return ModelFamily.LLAMA for succeeded llama endpoints, else None (errors are logged)."""
        try:
            if serverless_endpoint.get('properties', {}).get('provisioningState') != "Succeeded":
                return None

            if (try_get_from_dict(serverless_endpoint, ['properties', 'offer', 'publisher']) == 'Meta'
                    and "llama" in try_get_from_dict(serverless_endpoint, ['properties', 'offer', 'offerName'])):
                return ModelFamily.LLAMA
            if (try_get_from_dict(serverless_endpoint,
                                  ['properties', 'marketplaceInfo', 'publisherId']) == 'metagenai'
                    and "llama" in try_get_from_dict(serverless_endpoint,
                                                     ['properties', 'marketplaceInfo', 'offerId'])):
                return ModelFamily.LLAMA
        except Exception as ex:
            print(f"Ignoring endpoint {serverless_endpoint['id']} due to error: {ex}", file=sys.stderr)
        return None

    def list_serverless_endpoints(self,
                                  token,
                                  subscription_id,
                                  resource_group,
                                  workspace_name,
                                  return_endpoint_url: bool = False):
        """Return UX dropdown entries for the workspace's supported serverless endpoints."""
        serverlessEndpoints = self._list(token, subscription_id, resource_group, workspace_name)

        result = []
        for e in serverlessEndpoints:
            if (self._validate_model_family(e)):
                result.append({
                    "value": f"serverlessEndpoint/{e['name']}",
                    "display_value": f"[Serverless] {e['name']}",
                    # "hyperlink": self.get_endpoint_url(e.endpoint_name)
                    "description": f"Serverless Endpoint: {e['name']}",
                })
                if return_endpoint_url:
                    result[-1]['url'] = try_get_from_dict(e, ['properties', 'inferenceEndpoint', 'uri'])
        return result

    def _list_endpoint_key(self,
                           token: str,
                           subscription_id: str,
                           resource_group: str,
                           workspace_name: str,
                           serverless_endpoint_name: str):
        """POST <endpoint>/listKeys and return the parsed JSON.

        NOTE(review): on failure the exception is swallowed and None is implicitly
        returned, which makes the caller's ['primaryKey'] lookup raise TypeError.
        """
        headers = self._get_headers(token)
        url = self.get_serverless_arm_url(subscription_id, resource_group, workspace_name,
                                          f"{serverless_endpoint_name}/listKeys")
        try:
            response = requests.post(url, headers=headers, timeout=50)
            return json.loads(response.content)
        except Exception as e:
            print(f"Unable to get key from selected serverless endpoint. Exception: {e}", file=sys.stderr)

    def get_serverless_endpoint(self,
                                token: str,
                                subscription_id: str,
                                resource_group: str,
                                workspace_name: str,
                                serverless_endpoint_name: str):
        """GET a single serverless endpoint resource; returns None on failure (logged)."""
        headers = self._get_headers(token)
        url = self.get_serverless_arm_url(subscription_id, resource_group, workspace_name, serverless_endpoint_name)
        try:
            response = requests.get(url, headers=headers, timeout=50)
            return json.loads(response.content)
        except Exception as e:
            print(f"Unable to get selected serverless endpoint. Exception: {e}", file=sys.stderr)

    def get_serverless_endpoint_key(self,
                                    token: str,
                                    subscription_id: str,
                                    resource_group: str,
                                    workspace_name: str,
                                    serverless_endpoint_name: str) -> Tuple[str, str, str]:
        """Resolve (endpoint_url, endpoint_api_key, model_family) for a serverless endpoint."""
        endpoint = self.get_serverless_endpoint(token,
                                                subscription_id,
                                                resource_group,
                                                workspace_name,
                                                serverless_endpoint_name)
        endpoint_url = try_get_from_dict(endpoint, ['properties', 'inferenceEndpoint', 'uri'])
        model_family = self._validate_model_family(endpoint)
        endpoint_api_key = self._list_endpoint_key(token,
                                                   subscription_id,
                                                   resource_group,
                                                   workspace_name,
                                                   serverless_endpoint_name)['primaryKey']
        return (endpoint_url, endpoint_api_key, model_family)


class CustomConnectionsContainer:
    """Lists and resolves promptflow custom connections (Azure workspace and local)."""

    def get_azure_custom_connection_names(self,
                                          credential,
                                          subscription_id: str,
                                          resource_group_name: str,
                                          workspace_name: str,
                                          return_endpoint_url: bool = False
                                          ) -> List[Dict[str, Union[str, int, float, list, Dict]]]:
        """Return UX entries for workspace custom connections that declare a supported model_family."""
        result = []
        try:
            from promptflow.azure import PFClient as AzurePFClient
            azure_pf_client = AzurePFClient(
                credential=credential,
                subscription_id=subscription_id,
                resource_group_name=resource_group_name,
                workspace_name=workspace_name)
        except Exception:
            message = "Skipping Azure PFClient. To connect, please ensure the following environment variables are set: "
            message += ",".join(ENDPOINT_REQUIRED_ENV_VARS)
            print(message, file=sys.stderr)
            return result

        connections = azure_pf_client._connections.list()
        for c in connections:
            if c.type == ConnectionType.CUSTOM and "model_family" in c.configs:
                try:
                    validate_model_family(c.configs["model_family"])
                    result.append({
                        "value": f"connection/{c.name}",
                        "display_value": f"[Connection] {c.name}",
                        # "hyperlink": "",
                        "description": f"Custom Connection: {c.name}",
                    })
                    if return_endpoint_url:
                        result[-1]['url'] = c.configs['endpoint_url']
                except Exception:
                    # silently ignore unsupported model family
                    continue

        return result

    def get_local_custom_connection_names(self,
                                          return_endpoint_url: bool = False
                                          ) -> List[Dict[str, Union[str, int, float, list, Dict]]]:
        """Return UX entries for local custom connections that declare a supported model_family."""
        result = []
        try:
            from promptflow import PFClient as LocalPFClient
        except Exception as e:
            print(f"Skipping Local PFClient. Exception: {e}", file=sys.stderr)
            return result

        pf = LocalPFClient()
        connections = pf.connections.list()
        for c in connections:
            if c.type == ConnectionType.CUSTOM and "model_family" in c.configs:
                try:
                    validate_model_family(c.configs["model_family"])
                    result.append({
                        "value": f"localConnection/{c.name}",
                        "display_value": f"[Local Connection] {c.name}",
                        # "hyperlink": "",
                        "description": f"Local Custom Connection: {c.name}",
                    })
                    if return_endpoint_url:
                        result[-1]['url'] = c.configs['endpoint_url']
                except Exception:
                    # silently ignore unsupported model family
                    continue

        return result

    def get_endpoint_from_local_custom_connection(self, connection_name) -> Tuple[str, str, str]:
        """Fetch a local connection (with secrets) and resolve it to endpoint details."""
        from promptflow import PFClient as LocalPFClient
        pf = LocalPFClient()
        connection = pf.connections.get(connection_name, with_secrets=True)
        return self.get_endpoint_from_custom_connection(connection)

    def get_endpoint_from_azure_custom_connection(self,
                                                  credential,
                                                  subscription_id,
                                                  resource_group_name,
                                                  workspace_name,
                                                  connection_name) -> Tuple[str, str, str]:
        """Fetch a workspace connection via ARM and resolve it to endpoint details."""
        from promptflow.azure import PFClient as AzurePFClient
        azure_pf_client = AzurePFClient(
            credential=credential,
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name)

        connection = azure_pf_client._arm_connections.get(connection_name)
        return self.get_endpoint_from_custom_connection(connection)

    def get_endpoint_from_custom_connection(self, connection: CustomConnection) -> Tuple[str, str, str]:
        """Validate required config/secret keys and return (endpoint_url, endpoint_api_key, model_family).

        :raises OpenModelLLMKeyValidationError: when a required config or secret key is missing.
        """
        conn_dict = dict(connection)
        for key in REQUIRED_CONFIG_KEYS:
            if key not in conn_dict:
                accepted_keys = ",".join([key for key in REQUIRED_CONFIG_KEYS])
                raise OpenModelLLMKeyValidationError(
                    message=f"""Required key `{key}` not found in given custom connection.
Required keys are: {accepted_keys}."""
                )
        for key in REQUIRED_SECRET_KEYS:
            if key not in conn_dict:
                accepted_keys = ",".join([key for key in REQUIRED_SECRET_KEYS])
                raise OpenModelLLMKeyValidationError(
                    message=f"""Required secret key `{key}` not found in given custom connection.
Required keys are: {accepted_keys}."""
                )

        model_family = validate_model_family(connection.configs['model_family'])

        return (connection.configs['endpoint_url'],
                connection.secrets['endpoint_api_key'],
                model_family)

    def list_custom_connection_names(self,
                                     credential,
                                     subscription_id: str,
                                     resource_group_name: str,
                                     workspace_name: str,
                                     return_endpoint_url: bool = False
                                     ) -> List[Dict[str, Union[str, int, float, list, Dict]]]:
        """Combine Azure workspace and local custom connection entries."""
        azure_custom_connections = self.get_azure_custom_connection_names(credential,
                                                                          subscription_id,
                                                                          resource_group_name,
                                                                          workspace_name,
                                                                          return_endpoint_url)
        local_custom_connections = self.get_local_custom_connection_names(return_endpoint_url)
        return azure_custom_connections + local_custom_connections


class EndpointsContainer:
    """Lists Azure ML online endpoints and their deployments via the AzureML SDK."""

    def get_ml_client(self,
                      credential,
                      subscription_id: str,
                      resource_group_name: str,
                      workspace_name: str):
        """Construct an MLClient; raises OpenModelLLMOnlineEndpointError with setup hints on failure."""
        try:
            from azure.ai.ml import MLClient
            return MLClient(
                credential=credential,
                subscription_id=subscription_id,
                resource_group_name=resource_group_name,
                workspace_name=workspace_name)
        except Exception as e:
            message = "Unable to connect to AzureML. Please ensure the following environment variables are set: "
            message += ",".join(ENDPOINT_REQUIRED_ENV_VARS)
            message += "\nException: " + str(e)
            raise OpenModelLLMOnlineEndpointError(message=message)

    def get_endpoints_and_deployments(self,
                                      credential,
                                      subscription_id: str,
                                      resource_group_name: str,
                                      workspace_name: str) -> List[Endpoint]:
        """Enumerate online endpoints that have at least one supported-model deployment."""
        ml_client = self.get_ml_client(credential, subscription_id, resource_group_name, workspace_name)

        list_of_endpoints: List[Endpoint] = []
        for ep in ml_client.online_endpoints.list():
            endpoint = Endpoint(
                endpoint_name=ep.name,
                endpoint_url=ep.scoring_uri,
                endpoint_api_key=ml_client.online_endpoints.get_keys(ep.name).primary_key)

            # NOTE(review): ep.traffic is iterated as keys here, so item[1] is the key's
            # second character, not the traffic weight — likely intended ep.traffic.items().
            ordered_deployment_names = sorted(ep.traffic, key=lambda item: item[1])
            deployments = ml_client.online_deployments.list(ep.name)

            for deployment_name in ordered_deployment_names:
                for d in deployments:
                    if d.name == deployment_name:
                        model_family = get_model_type(d.model)
                        if model_family is None:
                            continue

                        deployment = Deployment(deployment_name=d.name, model_family=model_family)
                        endpoint.deployments.append(deployment)

                        # Deployment are ordered by traffic level, first in is default
                        if endpoint.default_deployment is None:
                            endpoint.default_deployment = deployment

            if len(endpoint.deployments) > 0:
                list_of_endpoints.append(endpoint)

        self.__endpoints_and_deployments = list_of_endpoints
        return self.__endpoints_and_deployments

    def get_endpoint_url(self, endpoint_name, subscription_id, resource_group_name, workspace_name):
        """Azure ML studio detail-page URL for an online endpoint."""
        return f"https://ml.azure.com/endpoints/realtime/{endpoint_name}" \
            + f"/detail?wsid=/subscriptions/{subscription_id}" \
            + f"/resourceGroups/{resource_group_name}" \
            + f"/providers/Microsoft.MachineLearningServices/workspaces/{workspace_name}"

    def list_endpoint_names(self,
                            credential,
                            subscription_id,
                            resource_group_name,
                            workspace_name,
                            return_endpoint_url: bool = False
                            ) -> List[Dict[str, Union[str, int, float, list, Dict]]]:
        '''Function for listing endpoints in the UX'''
        endpoints_and_deployments = self.get_endpoints_and_deployments(
            credential,
            subscription_id,
            resource_group_name,
            workspace_name)

        result = []
        for e in endpoints_and_deployments:
            result.append({
                "value": f"onlineEndpoint/{e.endpoint_name}",
                "display_value": f"[Online] {e.endpoint_name}",
                "hyperlink": self.get_endpoint_url(e.endpoint_name,
                                                   subscription_id,
                                                   resource_group_name,
                                                   workspace_name),
                "description": f"Online Endpoint: {e.endpoint_name}",
            })
            if return_endpoint_url:
                result[-1]['url'] = e.endpoint_url
        return result

    def list_deployment_names(self,
                              credential,
                              subscription_id,
                              resource_group_name,
                              workspace_name,
                              endpoint_name: str
                              ) -> List[Dict[str, Union[str, int, float, list, Dict]]]:
        '''Function for listing deployments in the UX'''
        if endpoint_name is None:
            return []

        endpoints_and_deployments = self.get_endpoints_and_deployments(
            credential,
            subscription_id,
            resource_group_name,
            workspace_name)

        for endpoint in endpoints_and_deployments:
            if endpoint.endpoint_name == endpoint_name:
                result = []
                for d in endpoint.deployments:
                    result.append({
                        "value": d.deployment_name,
                        "display_value": d.deployment_name,
                        # "hyperlink": '',
                        "description": f"this is {d.deployment_name} item",
                    })
                return result
        return []


# Module-level singletons shared by the listing helpers below.
ENDPOINT_CONTAINER = EndpointsContainer()
CUSTOM_CONNECTION_CONTAINER = CustomConnectionsContainer()
SERVERLESS_ENDPOINT_CONTAINER = ServerlessEndpointsContainer()


def is_serverless_endpoint(endpoint_url: str) -> bool:
    """True when the URL points at an Azure serverless inference endpoint."""
    return "serverless.ml.azure.com" in endpoint_url or "inference.ai.azure.com" in endpoint_url


def try_get_from_dict(some_dict: Dict, key_list: List):
    """Walk *key_list* through nested dicts; returns None if any key is missing."""
    for key in key_list:
        if some_dict is None:
            return some_dict
        elif key in some_dict:
            some_dict = some_dict[key]
        else:
            return None
    return some_dict


def parse_endpoint_connection_type(endpoint_connection_name: str) -> Tuple[str, str]:
    """Split "type/name" into (lowercased type, name)."""
    endpoint_connection_details = endpoint_connection_name.split("/")
    return (endpoint_connection_details[0].lower(), endpoint_connection_details[1])
def list_endpoint_names(subscription_id: str, resource_group_name: str, workspace_name: str, return_endpoint_url: bool = False, force_refresh: bool = False) -> List[Dict[str, Union[str, int, float, list, Dict]]]: cache_file_path = None try: with tempfile.NamedTemporaryFile(delete=False) as temp_file: cache_file_path = os.path.join(os.path.dirname(temp_file.name), CONNECTION_CACHE_FILE) print(f"Attempting to read connection cache. File path: {cache_file_path}", file=sys.stdout) if force_refresh: print("....skipping. force_refresh is True", file=sys.stdout) else: with open(cache_file_path, 'r') as file: cache = ConnectionCache.from_filename(file) if cache.can_use(subscription_id, resource_group_name, workspace_name): if len(cache.connection_names) > 0: print("....using Connection Cache File", file=sys.stdout) return cache.connection_names else: print("....skipping. No connections in file", file=sys.stdout) else: print("....skipping. File not relevant", file=sys.stdout) except Exception as e: print(f"....failed to find\\read connection cache file. Regenerating. Error:{e}", file=sys.stdout) try: from azure.identity import DefaultAzureCredential credential = DefaultAzureCredential(exclude_interactive_browser_credential=False) token = credential.get_token("https://management.azure.com/.default").token except Exception as e: print(f"Skipping list_endpoint_names. 
Exception: {e}", file=sys.stderr) msg = "Exception getting token: Please retry" return [{"value": msg, "display_value": msg, "description": msg}] serverless_endpoints = SERVERLESS_ENDPOINT_CONTAINER.list_serverless_endpoints(token, subscription_id, resource_group_name, workspace_name, return_endpoint_url) online_endpoints = ENDPOINT_CONTAINER.list_endpoint_names(credential, subscription_id, resource_group_name, workspace_name, return_endpoint_url) custom_connections = CUSTOM_CONNECTION_CONTAINER.list_custom_connection_names(credential, subscription_id, resource_group_name, workspace_name, return_endpoint_url) list_of_endpoints = custom_connections + serverless_endpoints + online_endpoints cache = ConnectionCache(use_until=(datetime.now() + timedelta(minutes=5)).isoformat(), subscription_id=subscription_id, resource_group=resource_group_name, workspace_name=workspace_name, connection_names=list_of_endpoints) if len(list_of_endpoints) == 0: msg = "No endpoints found. Please add a connection." return [{"value": msg, "display_value": msg, "description": msg}] if cache_file_path is not None: try: print(f"Attempting to write connection cache. File path: {cache_file_path}", file=sys.stdout) with open(cache_file_path, 'w') as file: json.dump(cache, file, default=lambda obj: obj.__dict__) print("....written", file=sys.stdout) except Exception as e: print(f"""....failed to write connection cache file. Will need to reload next time. Error:{e}""", file=sys.stdout) return list_of_endpoints def list_deployment_names(subscription_id: str, resource_group_name: str, workspace_name: str, endpoint: str = None) -> List[Dict[str, Union[str, int, float, list, Dict]]]: deployment_default_list = [{ "value": DEPLOYMENT_DEFAULT, "display_value": DEPLOYMENT_DEFAULT, "description": "This will use the default deployment for the selected online endpoint." + "You can also manually enter a deployment name here." 
}] if endpoint is None or endpoint.strip() == "" or "/" not in endpoint: return deployment_default_list (endpoint_connection_type, endpoint_connection_name) = parse_endpoint_connection_type(endpoint) if endpoint_connection_type != "onlineendpoint": return deployment_default_list try: from azure.identity import DefaultAzureCredential credential = DefaultAzureCredential(exclude_interactive_browser_credential=False) except Exception as e: print(f"Skipping list_deployment_names. Exception: {e}", file=sys.stderr) return deployment_default_list return deployment_default_list + ENDPOINT_CONTAINER.list_deployment_names( credential, subscription_id, resource_group_name, workspace_name, endpoint_connection_name ) def get_model_type(deployment_model: str) -> str: m = re.match(r'azureml://registries/[^/]+/models/([^/]+)/versions/', deployment_model) if m is None: print(f"Unexpected model format: {deployment_model}. Skipping", file=sys.stdout) return None model = m[1].lower() if model.startswith("llama-2"): return ModelFamily.LLAMA elif model.startswith("tiiuae-falcon"): return ModelFamily.FALCON elif model.startswith("databricks-dolly-v2"): return ModelFamily.DOLLY elif model.startswith("gpt2"): return ModelFamily.GPT2 else: # Not found and\or handled. Ignore this endpoint\deployment print(f"Unexpected model type: {model} derived from deployed model: {deployment_model}") return None def validate_model_family(model_family: str): try: return ModelFamily[model_family] except KeyError: accepted_models = ",".join([model.name for model in ModelFamily]) raise OpenModelLLMKeyValidationError( message=f"""Given model_family '{model_family}' not recognized. 
Supported models are: {accepted_models}.""" ) class ModelFamily(str, Enum): LLAMA = "LLaMa" DOLLY = "Dolly" GPT2 = "GPT-2" FALCON = "Falcon" @classmethod def _missing_(cls, value): value = value.lower() for member in cls: if member.lower() == value: return member return None STANDARD_CONTRACT_MODELS = [ModelFamily.DOLLY, ModelFamily.GPT2, ModelFamily.FALCON] class API(str, Enum): CHAT = "chat" COMPLETION = "completion" class ContentFormatterBase: """Transform request and response of AzureML endpoint to match with required schema. """ content_type: Optional[str] = "application/json" """The MIME type of the input data passed to the endpoint""" accepts: Optional[str] = "application/json" """The MIME type of the response data returned from the endpoint""" @staticmethod def escape_special_characters(prompt: str) -> str: """Escapes any special characters in `prompt`""" return re.sub( r'\\([\\\"a-zA-Z])', r'\\\1', prompt) @staticmethod def parse_chat(chat_str: str) -> List[Dict[str, str]]: # LLaMa only supports below roles. separator = r"(?i)\n*(system|user|assistant)\s*:\s*\n" chunks = re.split(separator, chat_str) # remove any empty chunks chunks = [c.strip() for c in chunks if c.strip()] chat_list = [] for index in range(0, len(chunks), 2): role = chunks[index].lower() # Check if prompt follows chat api message format and has valid role. try: validate_role(role, VALID_LLAMA_ROLES) except ChatAPIInvalidRole as e: raise OpenModelLLMUserError(message=e.message) if len(chunks) <= index + 1: message = "Unexpected chat format. Please ensure the query matches the chat format of the model used." raise OpenModelLLMUserError(message=message) chat_list.append({ "role": role, "content": chunks[index+1] }) return chat_list @abstractmethod def format_request_payload(self, prompt: str, model_kwargs: Dict) -> str: """Formats the request body according to the input schema of the model. 
Returns bytes or seekable file like object in the format specified in the content_type request header. """ @abstractmethod def format_response_payload(self, output: bytes) -> str: """Formats the response body according to the output schema of the model. Returns the data type that is received from the response. """ class MIRCompleteFormatter(ContentFormatterBase): """Content handler for LLMs from the HuggingFace catalog.""" def format_request_payload(self, prompt: str, model_kwargs: Dict) -> str: input_str = json.dumps( { "input_data": {"input_string": [ContentFormatterBase.escape_special_characters(prompt)]}, "parameters": model_kwargs, } ) return input_str def format_response_payload(self, output: bytes) -> str: """These models only support generation - expect a single output style""" response_json = json.loads(output) if len(response_json) > 0 and "0" in response_json[0]: if "0" in response_json[0]: return response_json[0]["0"] elif "output" in response_json: return response_json["output"] error_message = f"Unexpected response format. 
Response: {response_json}" print(error_message, file=sys.stderr) raise OpenSourceLLMOnlineEndpointError(message=error_message) class LlamaContentFormatter(ContentFormatterBase): """Content formatter for LLaMa""" def __init__(self, api: API, chat_history: Optional[str] = ""): super().__init__() self.api = api self.chat_history = chat_history def format_request_payload(self, prompt: str, model_kwargs: Dict) -> str: """Formats the request according the the chosen api""" if "do_sample" not in model_kwargs: model_kwargs["do_sample"] = True if self.api == API.CHAT: prompt_value = ContentFormatterBase.parse_chat(self.chat_history) else: prompt_value = [ContentFormatterBase.escape_special_characters(prompt)] return json.dumps( { "input_data": { "input_string": prompt_value, "parameters": model_kwargs } } ) def format_response_payload(self, output: bytes) -> str: """Formats response""" response_json = json.loads(output) if self.api == API.CHAT and "output" in response_json: return response_json["output"] elif self.api == API.COMPLETION and len(response_json) > 0 and "0" in response_json[0]: return response_json[0]["0"] else: error_message = f"Unexpected response format. 
Response: {response_json}" print(error_message, file=sys.stderr) raise OpenModelLLMOnlineEndpointError(message=error_message) class ServerlessLlamaContentFormatter(ContentFormatterBase): """Content formatter for LLaMa""" def __init__(self, api: API, chat_history: Optional[str] = ""): super().__init__() self.api = api self.chat_history = chat_history self.model_id = "llama-2-7b-hf" def format_request_payload(self, prompt: str, model_kwargs: Dict) -> str: """Formats the request according the the chosen api""" # Modify max_tokens key for serverless model_kwargs["max_tokens"] = model_kwargs["max_new_tokens"] if self.api == API.CHAT: messages = ContentFormatterBase.parse_chat(self.chat_history) base_body = { "model": self.model_id, "messages": messages, "n": 1, } base_body.update(model_kwargs) else: prompt_value = ContentFormatterBase.escape_special_characters(prompt) base_body = { "prompt": prompt_value, "n": 1, } base_body.update(model_kwargs) return json.dumps(base_body) def format_response_payload(self, output: bytes) -> str: """Formats response""" response_json = json.loads(output) if self.api == API.CHAT and "choices" in response_json: return response_json["choices"][0]["message"]["content"] elif self.api == API.COMPLETION and "choices" in response_json: return response_json["choices"][0]["text"] else: error_message = f"Unexpected response format. 
Response: {response_json}" print(error_message, file=sys.stderr) raise OpenModelLLMOnlineEndpointError(message=error_message) class ContentFormatterFactory: """Factory class for supported models""" def get_content_formatter( model_family: ModelFamily, api: API, chat_history: Optional[List[Dict]] = [], endpoint_url: Optional[str] = "" ) -> ContentFormatterBase: if model_family == ModelFamily.LLAMA: if is_serverless_endpoint(endpoint_url): return ServerlessLlamaContentFormatter(chat_history=chat_history, api=api) else: return LlamaContentFormatter(chat_history=chat_history, api=api) elif model_family in STANDARD_CONTRACT_MODELS: return MIRCompleteFormatter() class AzureMLOnlineEndpoint: """Azure ML Online Endpoint models.""" endpoint_url: str = "" """URL of pre-existing Endpoint. Should be passed to constructor or specified as env var `AZUREML_ENDPOINT_URL`.""" endpoint_api_key: str = "" """Authentication Key for Endpoint. Should be passed to constructor or specified as env var `AZUREML_ENDPOINT_API_KEY`.""" content_formatter: Any = None """The content formatter that provides an input and output transform function to handle formats between the LLM and the endpoint""" model_kwargs: Optional[Dict] = {} """Key word arguments to pass to the model.""" def __init__( self, endpoint_url: str, endpoint_api_key: str, content_formatter: ContentFormatterBase, model_family: ModelFamily, deployment_name: Optional[str] = None, model_kwargs: Optional[Dict] = {}, ): self.endpoint_url = endpoint_url self.endpoint_api_key = endpoint_api_key self.deployment_name = deployment_name self.content_formatter = content_formatter self.model_kwargs = model_kwargs self.model_family = model_family def _call_endpoint(self, request_body: str) -> str: """call.""" headers = { "Content-Type": "application/json", "Authorization": ("Bearer " + self.endpoint_api_key), "x-ms-user-agent": "PromptFlow/OpenModelLLM/" + self.model_family } # If this is not set it'll use the default deployment on the endpoint. 
if self.deployment_name is not None: headers["azureml-model-deployment"] = self.deployment_name result = requests.post(self.endpoint_url, data=request_body, headers=headers) if result.status_code != 200: error_message = f"""Request failure while calling Online Endpoint Status:{result.status_code} Error:{result.text}""" print(error_message, file=sys.stderr) raise OpenModelLLMOnlineEndpointError(message=error_message) return result.text def __call__( self, prompt: str ) -> str: """Call out to an AzureML Managed Online endpoint. Args: prompt: The prompt to pass into the model. Returns: The string generated by the model. Example: .. code-block:: python response = azureml_model("Tell me a joke.") """ request_body = self.content_formatter.format_request_payload(prompt, self.model_kwargs) endpoint_response = self._call_endpoint(request_body) response = self.content_formatter.format_response_payload(endpoint_response) return response class OpenModelLLM(ToolProvider): def __init__(self): super().__init__() def get_deployment_from_endpoint(self, credential, subscription_id: str, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str = None) -> Tuple[str, str, str]: endpoints_and_deployments = ENDPOINT_CONTAINER.get_endpoints_and_deployments( credential, subscription_id, resource_group_name, workspace_name) for ep in endpoints_and_deployments: if ep.endpoint_name == endpoint_name: if deployment_name is None: return (ep.endpoint_url, ep.endpoint_api_key, ep.default_deployment.model_family) for d in ep.deployments: if d.deployment_name == deployment_name: return (ep.endpoint_url, ep.endpoint_api_key, d.model_family) message = f"""Invalid endpoint and deployment values. Please ensure endpoint name and deployment names are correct, and the deployment was successfull. 
Could not find endpoint: {endpoint_name} and deployment: {deployment_name}""" raise OpenModelLLMUserError(message=message) def sanitize_endpoint_url(self, endpoint_url: str, api_type: API): if is_serverless_endpoint(endpoint_url): if api_type == API.CHAT: if not endpoint_url.endswith("/v1/chat/completions"): return endpoint_url + "/v1/chat/completions" else: if not endpoint_url.endswith("/v1/completions"): return endpoint_url + "/v1/completions" return endpoint_url def get_endpoint_details(self, subscription_id: str, resource_group_name: str, workspace_name: str, endpoint: str, api_type: API, deployment_name: str = None, **kwargs) -> Tuple[str, str, str]: if self.endpoint_values_in_kwargs(**kwargs): endpoint_url = kwargs["endpoint_url"] endpoint_api_key = kwargs["endpoint_api_key"] model_family = kwargs["model_family"] # clean these up, aka don't send them to MIR del kwargs["endpoint_url"] del kwargs["endpoint_api_key"] del kwargs["model_family"] return (endpoint_url, endpoint_api_key, model_family) (endpoint_connection_type, endpoint_connection_name) = parse_endpoint_connection_type(endpoint) print(f"endpoint_connection_type: {endpoint_connection_type} name: {endpoint_connection_name}", file=sys.stdout) con_type = endpoint_connection_type.lower() if con_type in AUTH_REQUIRED_CONNECTION_TYPES: try: from azure.identity import DefaultAzureCredential credential = DefaultAzureCredential(exclude_interactive_browser_credential=False) token = credential.get_token("https://management.azure.com/.default").token except Exception as e: message = f"""Error encountered while attempting to Authorize access to {endpoint}. 
Exception: {e}""" print(message, file=sys.stderr) raise OpenModelLLMUserError(message=message) if con_type == "serverlessendpoint": (endpoint_url, endpoint_api_key, model_family) = SERVERLESS_ENDPOINT_CONTAINER.get_serverless_endpoint_key( token, subscription_id, resource_group_name, workspace_name, endpoint_connection_name) elif con_type == "onlineendpoint": (endpoint_url, endpoint_api_key, model_family) = self.get_deployment_from_endpoint( credential, subscription_id, resource_group_name, workspace_name, endpoint_connection_name, deployment_name) elif con_type == "connection": (endpoint_url, endpoint_api_key, model_family) = CUSTOM_CONNECTION_CONTAINER.get_endpoint_from_azure_custom_connection( credential, subscription_id, resource_group_name, workspace_name, endpoint_connection_name) elif con_type == "localconnection": (endpoint_url, endpoint_api_key, model_family) = CUSTOM_CONNECTION_CONTAINER.get_endpoint_from_local_custom_connection( endpoint_connection_name) else: raise OpenModelLLMUserError(message=f"Invalid endpoint connection type: {endpoint_connection_type}") return (self.sanitize_endpoint_url(endpoint_url, api_type), endpoint_api_key, model_family) def endpoint_values_in_kwargs(self, **kwargs): # This is mostly for testing, suggest not using this since security\privacy concerns for the endpoint key if 'endpoint_url' not in kwargs and 'endpoint_api_key' not in kwargs and 'model_family' not in kwargs: return False if 'endpoint_url' not in kwargs or 'endpoint_api_key' not in kwargs or 'model_family' not in kwargs: message = """Endpoint connection via kwargs not fully set. 
If using kwargs, the following values must be set: endpoint_url, endpoint_api_key, and model_family""" raise OpenModelLLMKeyValidationError(message=message) return True @tool @handle_online_endpoint_error() def call( self, prompt: PromptTemplate, api: API, endpoint_name: str, deployment_name: Optional[str] = None, temperature: Optional[float] = 1.0, max_new_tokens: Optional[int] = 500, top_p: Optional[float] = 1.0, model_kwargs: Optional[Dict] = {}, **kwargs ) -> str: # Sanitize deployment name. Empty deployment name is the same as None. if deployment_name is not None: deployment_name = deployment_name.strip() if not deployment_name or deployment_name == DEPLOYMENT_DEFAULT: deployment_name = None print(f"Executing Open Model LLM Tool for endpoint: '{endpoint_name}', deployment: '{deployment_name}'", file=sys.stdout) (endpoint_url, endpoint_api_key, model_family) = self.get_endpoint_details( subscription_id=os.getenv("AZUREML_ARM_SUBSCRIPTION", None), resource_group_name=os.getenv("AZUREML_ARM_RESOURCEGROUP", None), workspace_name=os.getenv("AZUREML_ARM_WORKSPACE_NAME", None), endpoint=endpoint_name, api_type=api, deployment_name=deployment_name, **kwargs) prompt = render_jinja_template(prompt, trim_blocks=True, keep_trailing_newline=True, **kwargs) model_kwargs["top_p"] = top_p model_kwargs["temperature"] = temperature model_kwargs["max_new_tokens"] = max_new_tokens content_formatter = ContentFormatterFactory.get_content_formatter( model_family=model_family, api=api, chat_history=prompt, endpoint_url=endpoint_url ) llm = AzureMLOnlineEndpoint( endpoint_url=endpoint_url, endpoint_api_key=endpoint_api_key, model_family=model_family, content_formatter=content_formatter, deployment_name=deployment_name, model_kwargs=model_kwargs ) return llm(prompt)
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/exception.py
from openai import OpenAIError from promptflow.exceptions import ErrorTarget, SystemErrorException, UserErrorException openai_error_code_ref_message = "Error reference: https://platform.openai.com/docs/guides/error-codes/api-errors" def to_openai_error_message(e: Exception) -> str: ex_type = type(e).__name__ if str(e) == "<empty message>": msg = "The api key is invalid or revoked. " \ "You can correct or regenerate the api key of your connection." return f"OpenAI API hits {ex_type}: {msg}" # for models that do not support the `functions` parameter. elif "Unrecognized request argument supplied: functions" in str(e): msg = "Current model does not support the `functions` parameter. If you are using openai connection, then " \ "please use gpt-3.5-turbo, gpt-4, gpt-4-32k, gpt-3.5-turbo-0613 or gpt-4-0613. You can refer to " \ "https://platform.openai.com/docs/guides/gpt/function-calling. If you are using azure openai " \ "connection, then please first go to your Azure OpenAI resource, deploy model 'gpt-35-turbo' or " \ "'gpt-4' with version 0613, then go to prompt flow connection page, upgrade connection api version to " \ "'2023-07-01-preview'. You can refer to " \ "https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/function-calling." return f"OpenAI API hits {ex_type}: {msg}" elif "The completion operation does not work with the specified model" in str(e) or \ "logprobs, best_of and echo parameters are not available" in str(e): msg = "The completion operation does not work with the current model. " \ "Completion API is a legacy api and is going to be deprecated soon. " \ "Please change to use Chat API for current model. " \ "You could refer to guideline at https://aka.ms/pfdoc/chat-prompt " \ "or view the samples in our gallery that contain 'Chat' in the name." return f"OpenAI API hits {ex_type}: {msg}" elif "Invalid content type. image_url is only supported by certain models" in str(e): msg = "Current model does not support the image input. 
If you are using openai connection, then please use " \ "gpt-4-vision-preview. You can refer to https://platform.openai.com/docs/guides/vision." \ "If you are using azure openai connection, then please first go to your Azure OpenAI resource, " \ "create a GPT-4 Turbo with Vision deployment by selecting model name: \"gpt-4\" and "\ "model version \"vision-preview\". You can refer to " \ "https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/gpt-with-vision" return f"OpenAI API hits {ex_type}: {msg}" elif ("\'response_format\' of type" in str(e) and "is not supported with this model." in str(e))\ or ("Additional properties are not allowed" in str(e) and "unexpected) - \'response_format\'" in str(e)): msg = "The response_format parameter needs to be a dictionary such as {\"type\": \"text\"}. " \ "The value associated with the type key should be either 'text' or 'json_object' " \ "If you are using openai connection, you can only set response_format to { \"type\": \"json_object\" } " \ "when calling gpt-3.5-turbo-1106 or gpt-4-1106-preview to enable JSON mode. You can refer to " \ "https://platform.openai.com/docs/guides/text-generation/json-mode. If you are using azure openai " \ "connection, then please first go to your Azure OpenAI resource, deploy model 'gpt-35-turbo-1106' or " \ "'gpt-4-1106-preview'. You can refer to " \ "https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/json-mode?tabs=python." return f"OpenAI API hits {ex_type}: {msg}" else: return f"OpenAI API hits {ex_type}: {str(e)} [{openai_error_code_ref_message}]" class WrappedOpenAIError(UserErrorException): """Refine error messages on top of native openai errors.""" def __init__(self, ex: OpenAIError, **kwargs): self._ex = ex super().__init__(target=ErrorTarget.TOOL, **kwargs) @property def message(self): return str(to_openai_error_message(self._ex)) @property def error_codes(self): """The hierarchy of the error codes. 
We follow the "Microsoft REST API Guidelines" to define error codes in a hierarchy style. See the below link for details: https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses This list will be converted into an error code hierarchy by the prompt flow framework. For this case, it will be converted into a data structure that equivalent to: { "code": "UserError", "innerError": { "code": "OpenAIError", "innerError": { "code": self._ex.__class__.__name__, "innerError": None } } } """ return ["UserError", "OpenAIError", self._ex.__class__.__name__] class ExceedMaxRetryTimes(WrappedOpenAIError): """Base exception raised when retry exceeds max times.""" @property def message(self): return "Exceed max retry times. " + super().message class ToolValidationError(UserErrorException): """Base exception raised when failed to validate tool.""" def __init__(self, **kwargs): super().__init__(**kwargs, target=ErrorTarget.TOOL) class LLMError(UserErrorException): """Base exception raised when failed to call openai api with non-OpenAIError.""" def __init__(self, **kwargs): super().__init__(**kwargs, target=ErrorTarget.TOOL) class JinjaTemplateError(ToolValidationError): """Base exception raised when failed to render jinja template.""" pass class ChatAPIInvalidRole(ToolValidationError): """Base exception raised when failed to validate chat api role.""" pass class ChatAPIFunctionRoleInvalidFormat(ToolValidationError): """Base exception raised when failed to validate chat api function role format.""" pass class ChatAPIInvalidFunctions(ToolValidationError): """Base exception raised when failed to validate functions when call chat api.""" pass class FunctionCallNotSupportedInStreamMode(ToolValidationError): """Base exception raised when use functions parameter in stream mode when call chat api.""" pass class InvalidConnectionType(ToolValidationError): """Base exception raised when failed to pass invalid connection type.""" pass class 
SerpAPISystemError(SystemErrorException): """Base exception raised when failed to call serp api with system error.""" def __init__(self, **kwargs): super().__init__(**kwargs, target=ErrorTarget.TOOL) class SerpAPIUserError(UserErrorException): """Base exception raised when failed to call serp api with user error.""" def __init__(self, **kwargs): super().__init__(**kwargs, target=ErrorTarget.TOOL) class OpenModelLLMOnlineEndpointError(UserErrorException): """Base exception raised when the call to an online endpoint failed.""" def __init__(self, **kwargs): super().__init__(**kwargs, target=ErrorTarget.TOOL) class OpenModelLLMUserError(UserErrorException): """Base exception raised when the call to Open Model LLM failed with a user error.""" def __init__(self, **kwargs): super().__init__(**kwargs, target=ErrorTarget.TOOL) class OpenModelLLMKeyValidationError(ToolValidationError): """Base exception raised when failed to validate functions when call chat api.""" def __init__(self, **kwargs): super().__init__(**kwargs) class AzureContentSafetyInputValueError(UserErrorException): """Base exception raised when the input type of Azure Content Safety is invalid.""" def __init__(self, **kwargs): super().__init__(**kwargs, target=ErrorTarget.TOOL) class AzureContentSafetySystemError(SystemErrorException): """Base exception raised when failed to call Azure Content Safety api with system error.""" def __init__(self, **kwargs): super().__init__(**kwargs, target=ErrorTarget.TOOL)
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/__init__.py
from .aoai import AzureOpenAI # noqa: F401 from .openai import OpenAI # noqa: F401 from .serpapi import SerpAPI # noqa: F401
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/yamls/aoai_gpt4v.yaml
promptflow.tools.aoai_gpt4v.AzureOpenAI.chat: name: Azure OpenAI GPT-4 Turbo with Vision description: Use Azure OpenAI GPT-4 Turbo with Vision to leverage AOAI vision ability. type: custom_llm module: promptflow.tools.aoai_gpt4v class_name: AzureOpenAI function: chat tool_state: preview icon: light: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAx0lEQVR4nJWSwQ2CQBBFX0jAcjgqXUgPJNiIsQQrIVCIFy8GC6ABDcGDX7Mus9n1Xz7zZ+fPsLPwH4bUg0dD2wMPcbR48Uxq4AKU4iSTDwZ1LhWXipN/B3V0J6hjBTvgLHZNonewBXrgDpzEvXSIjN0BE3AACmmF4kl5F6tNzcCoLpW0SvGovFvsb4oZ2AANcAOu4ka6axCcINN3rg654sww+CYsPD0OwjcozFNh/Qcd78tqVbCIW+n+Fky472Bh/Q6SYb1EEy8tDzd+9IsVPAAAAABJRU5ErkJggg== dark: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAA2ElEQVR4nJXSzW3CQBAF4DUSTjk+Al1AD0ikESslpBIEheRALhEpgAYSWV8OGUublf/yLuP3PPNmdndS+gdwXZrYDmh7fGE/W+wXbaYd8IYm4rxJPnZ0boI3wZcdJxs/n+AwV7DFK7aFyfQdYIMLPvES8YJNf5yp4jMeeEYdWh38gXOR35YGHe5xabvQdsHv6PLi8qV6gycc8YH3iMfQu6Lh4ASr+F5Hh3XwVWnQYzUkVlX1nccplAb1SN6Y/sfgmlK64VS8wimldIv/0yj2QLkHizG0iWP4AVAfQ34DVQONAAAAAElFTkSuQmCC default_prompt: | # system: As an AI assistant, your task involves interpreting images and responding to questions about the image. Remember to provide accurate answers based on the information present in the image. # user: Can you tell me what the image depicts? ![image]({{image_input}}) inputs: connection: type: - AzureOpenAIConnection deployment_name: type: - string temperature: default: 1 type: - double top_p: default: 1 type: - double max_tokens: default: 512 type: - int stop: default: "" type: - list presence_penalty: default: 0 type: - double frequency_penalty: default: 0 type: - double
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/yamls/embedding.yaml
promptflow.tools.embedding.embedding: name: Embedding description: Use Open AI's embedding model to create an embedding vector representing the input text. type: python module: promptflow.tools.embedding function: embedding inputs: connection: type: [AzureOpenAIConnection, OpenAIConnection] deployment_name: type: - string enabled_by: connection enabled_by_type: [AzureOpenAIConnection] capabilities: completion: false chat_completion: false embeddings: true model_list: - text-embedding-ada-002 - text-search-ada-doc-001 - text-search-ada-query-001 model: type: - string enabled_by: connection enabled_by_type: [OpenAIConnection] enum: - text-embedding-ada-002 - text-search-ada-doc-001 - text-search-ada-query-001 allow_manual_entry: true input: type: - string
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/yamls/open_model_llm.yaml
promptflow.tools.open_model_llm.OpenModelLLM.call: name: Open Model LLM description: Use an open model from the Azure Model catalog, deployed to an AzureML Online Endpoint for LLM Chat or Completion API calls. icon: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAACgElEQVR4nGWSz2vcVRTFP/e9NzOZ1KDGohASslLEH6VLV0ak4l/QpeDCrfQPcNGliODKnVm4EBdBsIjQIlhciKW0ycKFVCSNbYnjdDLtmPnmO/nO9917XcxMkjYX3uLx7nnn3HOuMK2Nix4fP78ZdrYXVkLVWjf3l3B1B+HpcjzGFtmqa6cePz7/x0dnn1n5qhj3iBJPYREIURAJuCtpY8PjReDbrf9WG7H1fuefwQU9qKztTcMJT+PNnEFvjGVDBDlSsH6p/9MLzy6+NxwVqI8RAg4IPmWedMckdLYP6O6UpIaQfvyyXG012+e79/ZfHukoS1ISMT2hGTB1RkUmNgQ5QZ0w+a2VWDq73MbdEWmfnnv6UWe7oNzPaLapl5CwuLTXK9WUGBuCjqekzhP+z52ZXOrKMD3OJg0Hh778aiOuvpnYvp05d6GJO4iAO4QAe/eV36/X5LFRV4Zmn+AdkqlL8Vjp3oVioOz+WTPzzYEgsN+fgPLYyJVheSbPPVl2ikeGZRjtG52/8rHuaV9VOlpP2OtKyVndcRVCSqOhsvxa4vW359i6OuKdD+aP8Q4SYPdOzS/flGjt1JUSaMqZ5nwa1Y8qWb/Ud/eZZkHisYezEM0m+fcelDr8F1SqW2LNK6r1jXQwyLzy1hxvrLXZulry7ocL+FS6G4QIu3fG/Px1gdYeW7LIgXU2P/115TOA5G7e3Rmj2aS/m7l5pThiZzrCcE/d1XHzbln373nw7y6veeoUm5KCNKT/IPPwbiY1hYd/l5MIT65BMFt87sU4v9D7/JMflr44uV6hGh1+L4RCkg6z5iK2tAhNLeLsNGwYA4fDYnC/drvuuFxe86NV/x+Ut27g0FvykgAAAABJRU5ErkJggg== type: custom_llm module: promptflow.tools.open_model_llm class_name: OpenModelLLM function: call inputs: endpoint_name: type: - string dynamic_list: func_path: promptflow.tools.open_model_llm.list_endpoint_names allow_manual_entry: true # Allow the user to clear this field is_multi_select: false deployment_name: default: '' type: - string dynamic_list: func_path: promptflow.tools.open_model_llm.list_deployment_names func_kwargs: - name: endpoint type: - string optional: true reference: ${inputs.endpoint} allow_manual_entry: true is_multi_select: false api: enum: - chat - completion type: - string temperature: default: 1.0 type: - double max_new_tokens: default: 500 type: - int top_p: default: 1.0 advanced: true type: - double model_kwargs: default: "{}" advanced: true type: - object
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/yamls/serpapi.yaml
promptflow.tools.serpapi.SerpAPI.search: name: Serp API description: Use Serp API to obtain search results from a specific search engine. inputs: connection: type: - SerpConnection engine: default: google enum: - google - bing type: - string location: default: '' type: - string num: default: '10' type: - int query: type: - string safe: default: 'off' enum: - active - 'off' type: - string type: python module: promptflow.tools.serpapi class_name: SerpAPI function: search
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/yamls/openai_gpt4v.yaml
promptflow.tools.openai_gpt4v.OpenAI.chat: name: OpenAI GPT-4V description: Use OpenAI GPT-4V to leverage vision ability. type: custom_llm module: promptflow.tools.openai_gpt4v class_name: OpenAI function: chat tool_state: preview icon: light: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAx0lEQVR4nJWSwQ2CQBBFX0jAcjgqXUgPJNiIsQQrIVCIFy8GC6ABDcGDX7Mus9n1Xz7zZ+fPsLPwH4bUg0dD2wMPcbR48Uxq4AKU4iSTDwZ1LhWXipN/B3V0J6hjBTvgLHZNonewBXrgDpzEvXSIjN0BE3AACmmF4kl5F6tNzcCoLpW0SvGovFvsb4oZ2AANcAOu4ka6axCcINN3rg654sww+CYsPD0OwjcozFNh/Qcd78tqVbCIW+n+Fky472Bh/Q6SYb1EEy8tDzd+9IsVPAAAAABJRU5ErkJggg== dark: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAA2ElEQVR4nJXSzW3CQBAF4DUSTjk+Al1AD0ikESslpBIEheRALhEpgAYSWV8OGUublf/yLuP3PPNmdndS+gdwXZrYDmh7fGE/W+wXbaYd8IYm4rxJPnZ0boI3wZcdJxs/n+AwV7DFK7aFyfQdYIMLPvES8YJNf5yp4jMeeEYdWh38gXOR35YGHe5xabvQdsHv6PLi8qV6gycc8YH3iMfQu6Lh4ASr+F5Hh3XwVWnQYzUkVlX1nccplAb1SN6Y/sfgmlK64VS8wimldIv/0yj2QLkHizG0iWP4AVAfQ34DVQONAAAAAElFTkSuQmCC default_prompt: | # system: As an AI assistant, your task involves interpreting images and responding to questions about the image. Remember to provide accurate answers based on the information present in the image. # user: Can you tell me what the image depicts? ![image]({{image_input}}) inputs: connection: type: - OpenAIConnection model: enum: - gpt-4-vision-preview allow_manual_entry: true type: - string temperature: default: 1 type: - double top_p: default: 1 type: - double max_tokens: default: 512 type: - int stop: default: "" type: - list presence_penalty: default: 0 type: - double frequency_penalty: default: 0 type: - double
0
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools
promptflow_repo/promptflow/src/promptflow-tools/promptflow/tools/yamls/azure_content_safety.yaml
promptflow.tools.azure_content_safety.analyze_text: module: promptflow.tools.azure_content_safety function: analyze_text inputs: connection: type: - AzureContentSafetyConnection hate_category: default: medium_sensitivity enum: - disable - low_sensitivity - medium_sensitivity - high_sensitivity type: - string self_harm_category: default: medium_sensitivity enum: - disable - low_sensitivity - medium_sensitivity - high_sensitivity type: - string sexual_category: default: medium_sensitivity enum: - disable - low_sensitivity - medium_sensitivity - high_sensitivity type: - string text: type: - string violence_category: default: medium_sensitivity enum: - disable - low_sensitivity - medium_sensitivity - high_sensitivity type: - string name: Content Safety (Text Analyze) description: Use Azure Content Safety to detect harmful content. type: python deprecated_tools: - content_safety_text.tools.content_safety_text_tool.analyze_text
0
promptflow_repo/promptflow
promptflow_repo/promptflow/docs/index.md
--- myst: html_meta: "description lang=en": "Prompt flow Doc" "google-site-verification": "rEZN-2h5TVqEco07aaMpqNcDx4bjr2czx1Hwfoxydrg" html_theme.sidebar_secondary.remove: true --- # Prompt flow [**Prompt flow**](https://github.com/microsoft/promptflow) is a suite of development tools designed to streamline the end-to-end development cycle of LLM-based AI applications, from ideation, prototyping, testing, evaluation to production deployment and monitoring. It makes prompt engineering much easier and enables you to build LLM apps with production quality. With prompt flow, you will be able to: - **Create [flows](./concepts/concept-flows.md)** that link [LLMs](./reference/tools-reference/llm-tool.md), [prompts](./reference/tools-reference/prompt-tool.md), [Python](./reference/tools-reference/python-tool.md) code and other [tools](./concepts/concept-tools.md) together in an executable workflow. - **Debug and iterate your flows**, especially the interaction with LLMs with ease. - **Evaluate your flows**, calculate quality and performance metrics with larger datasets. - **Integrate the testing and evaluation into your CI/CD system** to ensure quality of your flow. - **Deploy your flows** to the serving platform you choose or integrate into your app's code base easily. - (Optional but highly recommended) **Collaborate with your team** by leveraging the cloud version of [Prompt flow in Azure AI](https://learn.microsoft.com/en-us/azure/machine-learning/prompt-flow/overview-what-is-prompt-flow?view=azureml-api-2). > Welcome to join us to make prompt flow better by > participating [discussions](https://github.com/microsoft/promptflow/discussions), > opening [issues](https://github.com/microsoft/promptflow/issues/new/choose), > submitting [PRs](https://github.com/microsoft/promptflow/pulls). 
This documentation site contains guides for prompt flow [sdk, cli](https://pypi.org/project/promptflow/) and [vscode extension](https://marketplace.visualstudio.com/items?itemName=prompt-flow.prompt-flow) users. ```{gallery-grid} :grid-columns: 1 2 2 2 - header: "πŸš€ Quick Start" content: " Quick start and end-to-end tutorials.<br/><br/> - [Getting started with prompt flow](how-to-guides/quick-start.md)<br/> - [E2E development tutorial: chat with PDF](https://github.com/microsoft/promptflow/blob/main/examples/tutorials/e2e-development/chat-with-pdf.md)<br/> - Find more: [tutorials & samples](tutorials/index.md)<br/> " - header: "πŸ“’ How-to Guides" content: " Articles guide user to complete a specific task in prompt flow.<br/><br/> - [Develop a flow](how-to-guides/develop-a-flow/index.md)<br/> - [Initialize and test a flow](how-to-guides/init-and-test-a-flow.md)<br/> - [Run and evaluate a flow](how-to-guides/run-and-evaluate-a-flow/index.md)<br/> - [Tune prompts using variants](how-to-guides/tune-prompts-with-variants.md)<br/> - [Develop custom tool](how-to-guides/develop-a-tool/create-and-use-tool-package.md)<br/> - [Deploy a flow](how-to-guides/deploy-a-flow/index.md)<br/> - [Process image in flow](how-to-guides/process-image-in-flow.md) " ``` ```{gallery-grid} :grid-columns: 1 2 2 2 - header: "πŸ“‘ Concepts" content: " Introduction of key concepts of prompt flow.<br/><br/> - [Flows](concepts/concept-flows.md)<br/> - [Tools](concepts/concept-tools.md)<br/> - [Connections](concepts/concept-connections.md)<br/> - [Design principles](concepts/design-principles.md)<br/> " - header: "πŸ” Reference" content: " Reference provides technical information about prompt flow API.<br/><br/> - Command line Interface reference: [pf](reference/pf-command-reference.md)<br/> - Python library reference: [promptflow](reference/python-library-reference/promptflow.md)<br/> - Tool reference: [LLM Tool](reference/tools-reference/llm-tool.md), [Python 
Tool](reference/tools-reference/python-tool.md), [Prompt Tool](reference/tools-reference/prompt-tool.md)<br/> " ``` ```{toctree} :hidden: :maxdepth: 1 how-to-guides/quick-start ``` ```{toctree} :hidden: :maxdepth: 1 how-to-guides/index ``` ```{toctree} :hidden: :maxdepth: 1 tutorials/index ``` ```{toctree} :hidden: :maxdepth: 2 concepts/index ``` ```{toctree} :hidden: :maxdepth: 1 reference/index ``` ```{toctree} :hidden: :maxdepth: 1 cloud/index ``` ```{toctree} :hidden: :maxdepth: 1 integrations/index ```
0
promptflow_repo/promptflow
promptflow_repo/promptflow/docs/README.md
# Promptflow documentation contribute guidelines This folder contains the source code for [prompt flow documentation site](https://microsoft.github.io/promptflow/). This readme file will not be included in above doc site. It keeps a guide for promptflow documentation contributors. ## Content Below is a table of important doc pages. | Category | Article | |----------------|----------------| |Quick start|[Getting started with prompt flow](./how-to-guides/quick-start.md)| |Concepts|[Flows](./concepts/concept-flows.md)<br> [Tools](./concepts/concept-tools.md)<br> [Connections](./concepts/concept-connections.md)<br> [Variants](./concepts/concept-variants.md)<br> | |How-to guides|[How to initialize and test a flow](./how-to-guides/init-and-test-a-flow.md) <br>[How to run and evaluate a flow](./how-to-guides/run-and-evaluate-a-flow/index.md)<br> [How to tune prompts using variants](./how-to-guides/tune-prompts-with-variants.md)<br>[How to deploy a flow](./how-to-guides/deploy-a-flow/index.md)<br>[How to create and use your own tool package](./how-to-guides/develop-a-tool/create-and-use-tool-package.md)| |Tools reference|[LLM tool](./reference/tools-reference/llm-tool.md)<br> [Prompt tool](./reference/tools-reference/prompt-tool.md)<br> [Python tool](./reference/tools-reference/python-tool.md)<br> [Embedding tool](./reference/tools-reference/embedding_tool.md)<br>[SERP API tool](./reference/tools-reference/serp-api-tool.md) || ## Writing tips 0. Reach the doc source repository by clicking `Edit this page` on any page. ![Edit this page](./media/edit-this-page.png) 1. Please use :::{admonition} for experimental feature or notes, and admonition with dropdown for the Limitation Part. 2. Please use ::::{tab-set} to group your sdk/cli example, and put the cli at first. Use :sync: to sync multiple tables . 3. If you are unclear with the above lines, refer to [get started](./how-to-guides/quick-start.md) to see the usage. 4. 
Add gif: Use [ScreenToGif](https://www.screentogif.com/) to record your screen, edit and save as a gif. 5. Reach more element style at [Sphinx Design Components](https://pydata-sphinx-theme.readthedocs.io/en/latest/user_guide/web-components.html). ## Preview your changes **Local build**: We suggest using local build at the beginning, as it's fast and efficient. Please refer to [How to build doc site locally](./dev/documentation_guidelines.md#how-to-build-doc-site-locally). ## FAQ ### Adding image in doc Please use markdown syntax `![img desc](img link)` to reference image, because the relative path of image will be changed after sphinx build, and image placed in html tags cannot be referenced when built. ### Draw flow chart in doc We recommend using the mermaid, learn more from the [mermaid syntax doc](https://mermaid-js.github.io/mermaid/#/./flowchart?id=flowcharts-basic-syntax) - Recommend to install [vscode extension](https://marketplace.visualstudio.com/items?itemName=bierner.markdown-mermaid) to preview graph in vscode. ## Reference - [md-and-rst](https://coderefinery.github.io/sphinx-lesson/md-and-rst/) - [sphinx-quickstart](https://www.sphinx-doc.org/en/master/usage/quickstart.html)
0
promptflow_repo/promptflow/docs/media/how-to-guides/how-to-enable-streaming-mode
promptflow_repo/promptflow/docs/media/how-to-guides/how-to-enable-streaming-mode/scripts/chat_app.py
from datetime import datetime
import time
import requests
import sys
import json
from azure.identity import AzureCliCredential
import logging
from azure.ai.ml import MLClient
from sseclient import SSEClient


class ColoredFormatter(logging.Formatter):
    """Logging formatter that wraps each record in an ANSI color code based on level."""

    # Color code dictionary: maps lowercase level name -> ANSI escape prefix.
    color_codes = {
        'debug': '\033[0;32m',     # Green
        'info': '\033[0;36m',      # Cyan
        'warning': '\033[0;33m',   # Yellow
        'error': '\033[0;31m',     # Red
        'critical': '\033[0;35m',  # Magenta
    }

    def format(self, record):
        # Get the original message
        message = super().format(record)
        # Add color codes; "\033[0m" resets the terminal color afterwards.
        message = f"{self.color_codes.get(record.levelname.lower(), '')}{message}\033[0m"
        return message


logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
handler.setFormatter(ColoredFormatter())
logger.setLevel(logging.INFO)
logger.addHandler(handler)


def apply_delta(base: dict, delta: dict):
    """Merge a streamed delta into ``base`` in place.

    Keys already present are accumulated with ``+=`` (string/answer chunks are
    concatenated); new keys are inserted as-is.
    """
    for k, v in delta.items():
        if k in base:
            base[k] += v
        else:
            base[k] = v


def score(url, api_key, body, stream=True, on_event=None):
    """POST ``body`` to an AzureML online endpoint and return ``(output, streamed)``.

    When the endpoint answers with ``text/event-stream``, the SSE events are
    folded together via :func:`apply_delta` (and forwarded to ``on_event`` if
    given) and ``streamed`` is True; otherwise the JSON response is returned
    and ``streamed`` is False.

    :raises requests.HTTPError: if the endpoint returns a non-2xx status.
    """
    headers = {
        "Content-Type": "application/json",
        "Authorization": ("Bearer " + api_key),
        # The azureml-model-deployment header will force the request to go to a specific deployment.
        # Remove this header to have the request observe the endpoint traffic rules
        "azureml-model-deployment": "blue",
        "Accept": "text/event-stream, application/json" if stream else "application/json"
    }

    logger.info("Sending HTTP request...")
    logger.debug("POST %s", url)
    for name, value in headers.items():
        if name == "Authorization":
            # Never log the bearer token.
            value = "[REDACTED]"
        logger.debug(f">>> {name}: {value}")
    logger.debug(json.dumps(body, indent=4, ensure_ascii=False))
    logger.debug("")

    time1 = datetime.now()
    response = None
    try:
        response = requests.post(url, json=body, headers=headers, stream=stream)
        response.raise_for_status()
    finally:
        # Log timing/status even when raise_for_status() throws.
        time2 = datetime.now()
        if response is not None:
            logger.info(
                "Got response: %d %s (elapsed %s)",
                response.status_code,
                response.reason,
                time2 - time1,
            )
            for name, value in response.headers.items():
                logger.debug(f"<<< {name}: {value}")

    time1 = datetime.now()
    try:
        content_type = response.headers.get('Content-Type')
        # Guard against a missing Content-Type header: headers.get() returns
        # None in that case, and `"..." in None` would raise TypeError.
        if content_type and "text/event-stream" in content_type:
            output = {}
            client = SSEClient(response)
            for event in client.events():
                if on_event:
                    on_event(event)
                dct = json.loads(event.data)
                apply_delta(output, dct)
            return output, True
        else:
            return response.json(), False
    finally:
        time2 = datetime.now()
        logger.info("\nResponse reading elapsed: %s", time2 - time1)


class ChatApp:
    """Minimal console chat client against a promptflow AzureML online endpoint."""

    def __init__(self, ml_client, endpoint_name, chat_input_name, chat_output_name, stream=True, debug=False):
        """Resolve the endpoint's scoring URI and credential via ``ml_client``.

        ``chat_input_name``/``chat_output_name`` are the flow's input/output
        field names; ``stream`` requests server-sent-event streaming.
        """
        self._chat_input_name = chat_input_name
        self._chat_output_name = chat_output_name
        self._chat_history = []
        self._stream = stream

        if debug:
            logger.setLevel(logging.DEBUG)

        logger.info("Getting endpoint info...")
        endpoint = ml_client.online_endpoints.get(endpoint_name)
        keys = ml_client.online_endpoints.get_keys(endpoint_name)
        self._endpoint_url = endpoint.scoring_uri
        # Key-auth endpoints expose a primary key; token-auth ones an access token.
        self._endpoint_key = keys.primary_key if endpoint.auth_mode == "key" else keys.access_token
        logger.info("Done.")
        logger.debug(f"Target endpoint: {endpoint.id}")

    @property
    def url(self):
        # Scoring URI of the resolved endpoint.
        return self._endpoint_url

    @property
    def api_key(self):
        # Key or token used as the Bearer credential.
        return self._endpoint_key

    def get_payload(self, chat_input, chat_history=None):
        """Build the request body for one chat turn.

        ``chat_history`` defaults to an empty list; a ``None`` sentinel is used
        instead of a mutable ``[]`` default to avoid sharing one list across calls.
        """
        if chat_history is None:
            chat_history = []
        return {
            self._chat_input_name: chat_input,
            "chat_history": chat_history,
        }

    def chat_once(self, chat_input):
        """Send one question to the endpoint, print the answer, update history."""
        def on_event(event):
            # Print each streamed answer chunk as it arrives.
            dct = json.loads(event.data)
            answer_delta = dct.get(self._chat_output_name)
            if answer_delta:
                print(answer_delta, end='')
                # We need to flush the output
                # otherwise the text does not appear on the console
                # unless a new line comes.
                sys.stdout.flush()
                # Sleep for 20ms for better animation effects
                time.sleep(0.02)

        try:
            payload = self.get_payload(chat_input=chat_input, chat_history=self._chat_history)
            output, stream = score(self.url, self.api_key, payload, stream=self._stream, on_event=on_event)
            # We don't use self._stream here since the result may not always be
            # the same as self._stream specified.
            if stream:
                # Print a new line at the end of the content to make sure
                # the next logger line will always starts from a new line.
                pass  # print("\n")
            else:
                print(output.get(self._chat_output_name, "<empty>"))

            self._chat_history.append({
                "inputs": {
                    self._chat_input_name: chat_input,
                },
                "outputs": output,
            })
            logger.info("Length of chat history: %s", len(self._chat_history))
        except requests.HTTPError as e:
            logger.error(e.response.text)

    def chat(self):
        """Interactive read-eval-print loop; 'exit'/'bye' or Ctrl-C ends the chat."""
        while True:
            try:
                question = input("Chat with Wikipedia:> ")
                if question in ("exit", "bye"):
                    print("Bye.")
                    break
                self.chat_once(question)
            except KeyboardInterrupt:
                # When pressed Ctrl_C, exit
                print("\nBye.")
                break
            except Exception as e:
                logger.exception("An error occurred: %s", e)
                # Do not raise the errors out so that we can continue the chat


if __name__ == "__main__":
    ml_client = MLClient(
        credential=AzureCliCredential(),
        # Replace with your subscription ID, resource group name, and workspace name
        subscription_id="<your_sub_id>",
        resource_group_name="<your_resource_group_name>",
        workspace_name="<your_workspace_name>",
    )
    chat_app = ChatApp(
        ml_client=ml_client,
        # TODO: Replace with your online endpoint name
        endpoint_name="chat-with-wikipedia-stream",
        chat_input_name="question",
        chat_output_name="answer",
        stream=False,
        debug=True,
    )
    chat_app.chat()
0
promptflow_repo/promptflow/docs/media/how-to-guides/how-to-enable-streaming-mode
promptflow_repo/promptflow/docs/media/how-to-guides/how-to-enable-streaming-mode/scripts/requirements.txt
azure-identity azure-ai-ml sseclient-py
0